{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.0,
  "eval_steps": 500,
  "global_step": 570,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.008771929824561403,
      "grad_norm": 39.56407165527344,
      "learning_rate": 5.0000000000000004e-08,
      "loss": 5.1375,
      "step": 1
    },
    {
      "epoch": 0.017543859649122806,
      "grad_norm": 40.30452346801758,
      "learning_rate": 1.0000000000000001e-07,
      "loss": 5.1185,
      "step": 2
    },
    {
      "epoch": 0.02631578947368421,
      "grad_norm": 40.062313079833984,
      "learning_rate": 1.5000000000000002e-07,
      "loss": 5.0762,
      "step": 3
    },
    {
      "epoch": 0.03508771929824561,
      "grad_norm": 39.17148208618164,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 5.016,
      "step": 4
    },
    {
      "epoch": 0.043859649122807015,
      "grad_norm": 40.67367172241211,
      "learning_rate": 2.5000000000000004e-07,
      "loss": 5.0428,
      "step": 5
    },
    {
      "epoch": 0.05263157894736842,
      "grad_norm": 38.18095016479492,
      "learning_rate": 3.0000000000000004e-07,
      "loss": 5.2025,
      "step": 6
    },
    {
      "epoch": 0.06140350877192982,
      "grad_norm": 39.12940979003906,
      "learning_rate": 3.5000000000000004e-07,
      "loss": 4.9896,
      "step": 7
    },
    {
      "epoch": 0.07017543859649122,
      "grad_norm": 38.84568405151367,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 5.1078,
      "step": 8
    },
    {
      "epoch": 0.07894736842105263,
      "grad_norm": 39.38333511352539,
      "learning_rate": 4.5000000000000003e-07,
      "loss": 5.0808,
      "step": 9
    },
    {
      "epoch": 0.08771929824561403,
      "grad_norm": 39.427650451660156,
      "learning_rate": 5.000000000000001e-07,
      "loss": 5.0534,
      "step": 10
    },
    {
      "epoch": 0.09649122807017543,
      "grad_norm": 39.29513168334961,
      "learning_rate": 5.5e-07,
      "loss": 5.058,
      "step": 11
    },
    {
      "epoch": 0.10526315789473684,
      "grad_norm": 39.641231536865234,
      "learning_rate": 6.000000000000001e-07,
      "loss": 5.0317,
      "step": 12
    },
    {
      "epoch": 0.11403508771929824,
      "grad_norm": 37.91259765625,
      "learning_rate": 6.5e-07,
      "loss": 4.912,
      "step": 13
    },
    {
      "epoch": 0.12280701754385964,
      "grad_norm": 38.203548431396484,
      "learning_rate": 7.000000000000001e-07,
      "loss": 4.9705,
      "step": 14
    },
    {
      "epoch": 0.13157894736842105,
      "grad_norm": 39.15998840332031,
      "learning_rate": 7.5e-07,
      "loss": 4.6962,
      "step": 15
    },
    {
      "epoch": 0.14035087719298245,
      "grad_norm": 37.754669189453125,
      "learning_rate": 8.000000000000001e-07,
      "loss": 4.6262,
      "step": 16
    },
    {
      "epoch": 0.14912280701754385,
      "grad_norm": 35.871490478515625,
      "learning_rate": 8.500000000000001e-07,
      "loss": 4.5422,
      "step": 17
    },
    {
      "epoch": 0.15789473684210525,
      "grad_norm": 36.16888427734375,
      "learning_rate": 9.000000000000001e-07,
      "loss": 4.664,
      "step": 18
    },
    {
      "epoch": 0.16666666666666666,
      "grad_norm": 33.520118713378906,
      "learning_rate": 9.500000000000001e-07,
      "loss": 4.4697,
      "step": 19
    },
    {
      "epoch": 0.17543859649122806,
      "grad_norm": 30.896282196044922,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 4.3568,
      "step": 20
    },
    {
      "epoch": 0.18421052631578946,
      "grad_norm": 29.944643020629883,
      "learning_rate": 1.0500000000000001e-06,
      "loss": 4.2269,
      "step": 21
    },
    {
      "epoch": 0.19298245614035087,
      "grad_norm": 25.224485397338867,
      "learning_rate": 1.1e-06,
      "loss": 4.1272,
      "step": 22
    },
    {
      "epoch": 0.20175438596491227,
      "grad_norm": 24.410480499267578,
      "learning_rate": 1.1500000000000002e-06,
      "loss": 4.0585,
      "step": 23
    },
    {
      "epoch": 0.21052631578947367,
      "grad_norm": 21.480648040771484,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 3.9472,
      "step": 24
    },
    {
      "epoch": 0.21929824561403508,
      "grad_norm": 20.61946678161621,
      "learning_rate": 1.25e-06,
      "loss": 3.8879,
      "step": 25
    },
    {
      "epoch": 0.22807017543859648,
      "grad_norm": 19.578271865844727,
      "learning_rate": 1.3e-06,
      "loss": 3.6783,
      "step": 26
    },
    {
      "epoch": 0.23684210526315788,
      "grad_norm": 17.418983459472656,
      "learning_rate": 1.3500000000000002e-06,
      "loss": 3.6826,
      "step": 27
    },
    {
      "epoch": 0.24561403508771928,
      "grad_norm": 18.160301208496094,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 3.478,
      "step": 28
    },
    {
      "epoch": 0.2543859649122807,
      "grad_norm": 17.573204040527344,
      "learning_rate": 1.45e-06,
      "loss": 3.459,
      "step": 29
    },
    {
      "epoch": 0.2631578947368421,
      "grad_norm": 17.1265869140625,
      "learning_rate": 1.5e-06,
      "loss": 3.3999,
      "step": 30
    },
    {
      "epoch": 0.2719298245614035,
      "grad_norm": 15.527145385742188,
      "learning_rate": 1.5500000000000002e-06,
      "loss": 3.2817,
      "step": 31
    },
    {
      "epoch": 0.2807017543859649,
      "grad_norm": 14.773847579956055,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 3.234,
      "step": 32
    },
    {
      "epoch": 0.2894736842105263,
      "grad_norm": 12.039301872253418,
      "learning_rate": 1.6500000000000003e-06,
      "loss": 3.132,
      "step": 33
    },
    {
      "epoch": 0.2982456140350877,
      "grad_norm": 9.217979431152344,
      "learning_rate": 1.7000000000000002e-06,
      "loss": 3.0548,
      "step": 34
    },
    {
      "epoch": 0.30701754385964913,
      "grad_norm": 7.575639724731445,
      "learning_rate": 1.75e-06,
      "loss": 2.9529,
      "step": 35
    },
    {
      "epoch": 0.3157894736842105,
      "grad_norm": 7.496004104614258,
      "learning_rate": 1.8000000000000001e-06,
      "loss": 2.8967,
      "step": 36
    },
    {
      "epoch": 0.32456140350877194,
      "grad_norm": 7.45414924621582,
      "learning_rate": 1.85e-06,
      "loss": 2.8837,
      "step": 37
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 8.555658340454102,
      "learning_rate": 1.9000000000000002e-06,
      "loss": 2.7473,
      "step": 38
    },
    {
      "epoch": 0.34210526315789475,
      "grad_norm": 10.03805160522461,
      "learning_rate": 1.9500000000000004e-06,
      "loss": 2.7355,
      "step": 39
    },
    {
      "epoch": 0.3508771929824561,
      "grad_norm": 9.30649471282959,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 2.6587,
      "step": 40
    },
    {
      "epoch": 0.35964912280701755,
      "grad_norm": 8.510339736938477,
      "learning_rate": 2.05e-06,
      "loss": 2.5977,
      "step": 41
    },
    {
      "epoch": 0.3684210526315789,
      "grad_norm": 4.709080696105957,
      "learning_rate": 2.1000000000000002e-06,
      "loss": 2.6286,
      "step": 42
    },
    {
      "epoch": 0.37719298245614036,
      "grad_norm": 5.128961086273193,
      "learning_rate": 2.15e-06,
      "loss": 2.4558,
      "step": 43
    },
    {
      "epoch": 0.38596491228070173,
      "grad_norm": 5.190136432647705,
      "learning_rate": 2.2e-06,
      "loss": 2.4432,
      "step": 44
    },
    {
      "epoch": 0.39473684210526316,
      "grad_norm": 4.893551349639893,
      "learning_rate": 2.25e-06,
      "loss": 2.4939,
      "step": 45
    },
    {
      "epoch": 0.40350877192982454,
      "grad_norm": 5.2434983253479,
      "learning_rate": 2.3000000000000004e-06,
      "loss": 2.3381,
      "step": 46
    },
    {
      "epoch": 0.41228070175438597,
      "grad_norm": 5.122412204742432,
      "learning_rate": 2.35e-06,
      "loss": 2.313,
      "step": 47
    },
    {
      "epoch": 0.42105263157894735,
      "grad_norm": 4.577274799346924,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 2.2236,
      "step": 48
    },
    {
      "epoch": 0.4298245614035088,
      "grad_norm": 4.722769737243652,
      "learning_rate": 2.4500000000000003e-06,
      "loss": 2.1987,
      "step": 49
    },
    {
      "epoch": 0.43859649122807015,
      "grad_norm": 5.059235095977783,
      "learning_rate": 2.5e-06,
      "loss": 2.1415,
      "step": 50
    },
    {
      "epoch": 0.4473684210526316,
      "grad_norm": 4.454439640045166,
      "learning_rate": 2.55e-06,
      "loss": 2.0466,
      "step": 51
    },
    {
      "epoch": 0.45614035087719296,
      "grad_norm": 4.94586706161499,
      "learning_rate": 2.6e-06,
      "loss": 1.8762,
      "step": 52
    },
    {
      "epoch": 0.4649122807017544,
      "grad_norm": 4.704402446746826,
      "learning_rate": 2.6500000000000005e-06,
      "loss": 1.8012,
      "step": 53
    },
    {
      "epoch": 0.47368421052631576,
      "grad_norm": 6.125903129577637,
      "learning_rate": 2.7000000000000004e-06,
      "loss": 1.7669,
      "step": 54
    },
    {
      "epoch": 0.4824561403508772,
      "grad_norm": 4.5356059074401855,
      "learning_rate": 2.7500000000000004e-06,
      "loss": 1.6607,
      "step": 55
    },
    {
      "epoch": 0.49122807017543857,
      "grad_norm": 6.56803035736084,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 1.6291,
      "step": 56
    },
    {
      "epoch": 0.5,
      "grad_norm": 4.910050392150879,
      "learning_rate": 2.85e-06,
      "loss": 1.5545,
      "step": 57
    },
    {
      "epoch": 0.5087719298245614,
      "grad_norm": 8.733433723449707,
      "learning_rate": 2.9e-06,
      "loss": 1.4206,
      "step": 58
    },
    {
      "epoch": 0.5175438596491229,
      "grad_norm": 8.582486152648926,
      "learning_rate": 2.95e-06,
      "loss": 1.3912,
      "step": 59
    },
    {
      "epoch": 0.5263157894736842,
      "grad_norm": 13.710689544677734,
      "learning_rate": 3e-06,
      "loss": 1.3297,
      "step": 60
    },
    {
      "epoch": 0.5350877192982456,
      "grad_norm": 23.400312423706055,
      "learning_rate": 3.05e-06,
      "loss": 1.296,
      "step": 61
    },
    {
      "epoch": 0.543859649122807,
      "grad_norm": 5.678805351257324,
      "learning_rate": 3.1000000000000004e-06,
      "loss": 1.2259,
      "step": 62
    },
    {
      "epoch": 0.5526315789473685,
      "grad_norm": 14.700899124145508,
      "learning_rate": 3.1500000000000003e-06,
      "loss": 1.1087,
      "step": 63
    },
    {
      "epoch": 0.5614035087719298,
      "grad_norm": 19.38919448852539,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 1.1805,
      "step": 64
    },
    {
      "epoch": 0.5701754385964912,
      "grad_norm": 8.460039138793945,
      "learning_rate": 3.2500000000000002e-06,
      "loss": 1.0963,
      "step": 65
    },
    {
      "epoch": 0.5789473684210527,
      "grad_norm": 13.371014595031738,
      "learning_rate": 3.3000000000000006e-06,
      "loss": 1.0627,
      "step": 66
    },
    {
      "epoch": 0.5877192982456141,
      "grad_norm": 22.380569458007812,
      "learning_rate": 3.3500000000000005e-06,
      "loss": 1.0869,
      "step": 67
    },
    {
      "epoch": 0.5964912280701754,
      "grad_norm": 5.780513286590576,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 0.9991,
      "step": 68
    },
    {
      "epoch": 0.6052631578947368,
      "grad_norm": 19.850841522216797,
      "learning_rate": 3.45e-06,
      "loss": 0.9683,
      "step": 69
    },
    {
      "epoch": 0.6140350877192983,
      "grad_norm": 17.160703659057617,
      "learning_rate": 3.5e-06,
      "loss": 0.845,
      "step": 70
    },
    {
      "epoch": 0.6228070175438597,
      "grad_norm": 14.264311790466309,
      "learning_rate": 3.5500000000000003e-06,
      "loss": 0.8059,
      "step": 71
    },
    {
      "epoch": 0.631578947368421,
      "grad_norm": 26.39459991455078,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 0.85,
      "step": 72
    },
    {
      "epoch": 0.6403508771929824,
      "grad_norm": 51.10348892211914,
      "learning_rate": 3.65e-06,
      "loss": 0.9755,
      "step": 73
    },
    {
      "epoch": 0.6491228070175439,
      "grad_norm": 28.795856475830078,
      "learning_rate": 3.7e-06,
      "loss": 0.8966,
      "step": 74
    },
    {
      "epoch": 0.6578947368421053,
      "grad_norm": 4.6617937088012695,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 0.7716,
      "step": 75
    },
    {
      "epoch": 0.6666666666666666,
      "grad_norm": 15.729666709899902,
      "learning_rate": 3.8000000000000005e-06,
      "loss": 0.7578,
      "step": 76
    },
    {
      "epoch": 0.6754385964912281,
      "grad_norm": 7.109970569610596,
      "learning_rate": 3.85e-06,
      "loss": 0.7055,
      "step": 77
    },
    {
      "epoch": 0.6842105263157895,
      "grad_norm": 20.84659194946289,
      "learning_rate": 3.900000000000001e-06,
      "loss": 0.7458,
      "step": 78
    },
    {
      "epoch": 0.6929824561403509,
      "grad_norm": 21.601303100585938,
      "learning_rate": 3.95e-06,
      "loss": 0.6879,
      "step": 79
    },
    {
      "epoch": 0.7017543859649122,
      "grad_norm": 3.6914751529693604,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.6179,
      "step": 80
    },
    {
      "epoch": 0.7105263157894737,
      "grad_norm": 16.539325714111328,
      "learning_rate": 4.05e-06,
      "loss": 0.5716,
      "step": 81
    },
    {
      "epoch": 0.7192982456140351,
      "grad_norm": 13.931925773620605,
      "learning_rate": 4.1e-06,
      "loss": 0.558,
      "step": 82
    },
    {
      "epoch": 0.7280701754385965,
      "grad_norm": 10.52951717376709,
      "learning_rate": 4.15e-06,
      "loss": 0.6018,
      "step": 83
    },
    {
      "epoch": 0.7368421052631579,
      "grad_norm": 17.337060928344727,
      "learning_rate": 4.2000000000000004e-06,
      "loss": 0.5501,
      "step": 84
    },
    {
      "epoch": 0.7456140350877193,
      "grad_norm": 13.500468254089355,
      "learning_rate": 4.25e-06,
      "loss": 0.5214,
      "step": 85
    },
    {
      "epoch": 0.7543859649122807,
      "grad_norm": 10.290645599365234,
      "learning_rate": 4.3e-06,
      "loss": 0.4996,
      "step": 86
    },
    {
      "epoch": 0.7631578947368421,
      "grad_norm": 9.757556915283203,
      "learning_rate": 4.350000000000001e-06,
      "loss": 0.498,
      "step": 87
    },
    {
      "epoch": 0.7719298245614035,
      "grad_norm": 9.325140953063965,
      "learning_rate": 4.4e-06,
      "loss": 0.4721,
      "step": 88
    },
    {
      "epoch": 0.7807017543859649,
      "grad_norm": 2.9322128295898438,
      "learning_rate": 4.450000000000001e-06,
      "loss": 0.4528,
      "step": 89
    },
    {
      "epoch": 0.7894736842105263,
      "grad_norm": 10.484073638916016,
      "learning_rate": 4.5e-06,
      "loss": 0.445,
      "step": 90
    },
    {
      "epoch": 0.7982456140350878,
      "grad_norm": 32.7827262878418,
      "learning_rate": 4.5500000000000005e-06,
      "loss": 0.5105,
      "step": 91
    },
    {
      "epoch": 0.8070175438596491,
      "grad_norm": 2.8477306365966797,
      "learning_rate": 4.600000000000001e-06,
      "loss": 0.4117,
      "step": 92
    },
    {
      "epoch": 0.8157894736842105,
      "grad_norm": 2.7680225372314453,
      "learning_rate": 4.65e-06,
      "loss": 0.3653,
      "step": 93
    },
    {
      "epoch": 0.8245614035087719,
      "grad_norm": 2.6512742042541504,
      "learning_rate": 4.7e-06,
      "loss": 0.3878,
      "step": 94
    },
    {
      "epoch": 0.8333333333333334,
      "grad_norm": 6.453914165496826,
      "learning_rate": 4.75e-06,
      "loss": 0.3611,
      "step": 95
    },
    {
      "epoch": 0.8421052631578947,
      "grad_norm": 3.4594080448150635,
      "learning_rate": 4.800000000000001e-06,
      "loss": 0.3817,
      "step": 96
    },
    {
      "epoch": 0.8508771929824561,
      "grad_norm": 3.6144917011260986,
      "learning_rate": 4.85e-06,
      "loss": 0.3618,
      "step": 97
    },
    {
      "epoch": 0.8596491228070176,
      "grad_norm": 5.349407196044922,
      "learning_rate": 4.9000000000000005e-06,
      "loss": 0.3218,
      "step": 98
    },
    {
      "epoch": 0.868421052631579,
      "grad_norm": 13.671236991882324,
      "learning_rate": 4.95e-06,
      "loss": 0.3329,
      "step": 99
    },
    {
      "epoch": 0.8771929824561403,
      "grad_norm": 5.84046745300293,
      "learning_rate": 5e-06,
      "loss": 0.2967,
      "step": 100
    },
    {
      "epoch": 0.8859649122807017,
      "grad_norm": 14.005338668823242,
      "learning_rate": 4.999963827125897e-06,
      "loss": 0.303,
      "step": 101
    },
    {
      "epoch": 0.8947368421052632,
      "grad_norm": 9.18114185333252,
      "learning_rate": 4.999855309550366e-06,
      "loss": 0.2762,
      "step": 102
    },
    {
      "epoch": 0.9035087719298246,
      "grad_norm": 3.0800487995147705,
      "learning_rate": 4.999674450413725e-06,
      "loss": 0.2628,
      "step": 103
    },
    {
      "epoch": 0.9122807017543859,
      "grad_norm": 82.03578186035156,
      "learning_rate": 4.999421254949728e-06,
      "loss": 0.4065,
      "step": 104
    },
    {
      "epoch": 0.9210526315789473,
      "grad_norm": 77.66315460205078,
      "learning_rate": 4.99909573048542e-06,
      "loss": 0.4307,
      "step": 105
    },
    {
      "epoch": 0.9298245614035088,
      "grad_norm": 18.28767967224121,
      "learning_rate": 4.998697886440927e-06,
      "loss": 0.2571,
      "step": 106
    },
    {
      "epoch": 0.9385964912280702,
      "grad_norm": 5.960445880889893,
      "learning_rate": 4.998227734329177e-06,
      "loss": 0.2847,
      "step": 107
    },
    {
      "epoch": 0.9473684210526315,
      "grad_norm": 5.437699794769287,
      "learning_rate": 4.9976852877555755e-06,
      "loss": 0.2728,
      "step": 108
    },
    {
      "epoch": 0.956140350877193,
      "grad_norm": 3.379631280899048,
      "learning_rate": 4.997070562417602e-06,
      "loss": 0.2467,
      "step": 109
    },
    {
      "epoch": 0.9649122807017544,
      "grad_norm": 3.1625075340270996,
      "learning_rate": 4.996383576104362e-06,
      "loss": 0.2273,
      "step": 110
    },
    {
      "epoch": 0.9736842105263158,
      "grad_norm": 15.588600158691406,
      "learning_rate": 4.995624348696071e-06,
      "loss": 0.2486,
      "step": 111
    },
    {
      "epoch": 0.9824561403508771,
      "grad_norm": 2.631044387817383,
      "learning_rate": 4.9947929021634815e-06,
      "loss": 0.1964,
      "step": 112
    },
    {
      "epoch": 0.9912280701754386,
      "grad_norm": 4.706504821777344,
      "learning_rate": 4.993889260567239e-06,
      "loss": 0.1901,
      "step": 113
    },
    {
      "epoch": 1.0,
      "grad_norm": 10.368465423583984,
      "learning_rate": 4.9929134500571954e-06,
      "loss": 0.1996,
      "step": 114
    },
    {
      "epoch": 1.0087719298245614,
      "grad_norm": 30.44986343383789,
      "learning_rate": 4.991865498871647e-06,
      "loss": 0.2606,
      "step": 115
    },
    {
      "epoch": 1.0175438596491229,
      "grad_norm": 14.421515464782715,
      "learning_rate": 4.99074543733652e-06,
      "loss": 0.2394,
      "step": 116
    },
    {
      "epoch": 1.0263157894736843,
      "grad_norm": 14.072005271911621,
      "learning_rate": 4.989553297864489e-06,
      "loss": 0.2288,
      "step": 117
    },
    {
      "epoch": 1.0350877192982457,
      "grad_norm": 4.395325660705566,
      "learning_rate": 4.988289114954045e-06,
      "loss": 0.2129,
      "step": 118
    },
    {
      "epoch": 1.043859649122807,
      "grad_norm": 7.286703586578369,
      "learning_rate": 4.986952925188489e-06,
      "loss": 0.186,
      "step": 119
    },
    {
      "epoch": 1.0526315789473684,
      "grad_norm": 8.332784652709961,
      "learning_rate": 4.98554476723488e-06,
      "loss": 0.178,
      "step": 120
    },
    {
      "epoch": 1.0614035087719298,
      "grad_norm": 1.3646447658538818,
      "learning_rate": 4.984064681842917e-06,
      "loss": 0.1687,
      "step": 121
    },
    {
      "epoch": 1.0701754385964912,
      "grad_norm": 4.494940757751465,
      "learning_rate": 4.982512711843753e-06,
      "loss": 0.1881,
      "step": 122
    },
    {
      "epoch": 1.0789473684210527,
      "grad_norm": 3.3929836750030518,
      "learning_rate": 4.980888902148757e-06,
      "loss": 0.1764,
      "step": 123
    },
    {
      "epoch": 1.087719298245614,
      "grad_norm": 1.8281155824661255,
      "learning_rate": 4.979193299748225e-06,
      "loss": 0.1602,
      "step": 124
    },
    {
      "epoch": 1.0964912280701755,
      "grad_norm": 3.494239568710327,
      "learning_rate": 4.977425953710005e-06,
      "loss": 0.1729,
      "step": 125
    },
    {
      "epoch": 1.1052631578947367,
      "grad_norm": 1.500410556793213,
      "learning_rate": 4.975586915178084e-06,
      "loss": 0.1666,
      "step": 126
    },
    {
      "epoch": 1.1140350877192982,
      "grad_norm": 1.4680222272872925,
      "learning_rate": 4.973676237371111e-06,
      "loss": 0.159,
      "step": 127
    },
    {
      "epoch": 1.1228070175438596,
      "grad_norm": 3.0383460521698,
      "learning_rate": 4.971693975580851e-06,
      "loss": 0.1484,
      "step": 128
    },
    {
      "epoch": 1.131578947368421,
      "grad_norm": 3.74821138381958,
      "learning_rate": 4.969640187170591e-06,
      "loss": 0.1586,
      "step": 129
    },
    {
      "epoch": 1.1403508771929824,
      "grad_norm": 4.682602405548096,
      "learning_rate": 4.967514931573473e-06,
      "loss": 0.1619,
      "step": 130
    },
    {
      "epoch": 1.1491228070175439,
      "grad_norm": 3.90673565864563,
      "learning_rate": 4.965318270290779e-06,
      "loss": 0.164,
      "step": 131
    },
    {
      "epoch": 1.1578947368421053,
      "grad_norm": 2.2017388343811035,
      "learning_rate": 4.963050266890152e-06,
      "loss": 0.1499,
      "step": 132
    },
    {
      "epoch": 1.1666666666666667,
      "grad_norm": 2.4211816787719727,
      "learning_rate": 4.960710987003753e-06,
      "loss": 0.1387,
      "step": 133
    },
    {
      "epoch": 1.1754385964912282,
      "grad_norm": 1.7753759622573853,
      "learning_rate": 4.958300498326363e-06,
      "loss": 0.1441,
      "step": 134
    },
    {
      "epoch": 1.1842105263157894,
      "grad_norm": 1.5529910326004028,
      "learning_rate": 4.955818870613425e-06,
      "loss": 0.1304,
      "step": 135
    },
    {
      "epoch": 1.1929824561403508,
      "grad_norm": 2.090593099594116,
      "learning_rate": 4.953266175679023e-06,
      "loss": 0.1419,
      "step": 136
    },
    {
      "epoch": 1.2017543859649122,
      "grad_norm": 2.7141878604888916,
      "learning_rate": 4.95064248739381e-06,
      "loss": 0.1444,
      "step": 137
    },
    {
      "epoch": 1.2105263157894737,
      "grad_norm": 2.3690481185913086,
      "learning_rate": 4.947947881682861e-06,
      "loss": 0.1383,
      "step": 138
    },
    {
      "epoch": 1.219298245614035,
      "grad_norm": 2.2403147220611572,
      "learning_rate": 4.945182436523482e-06,
      "loss": 0.1418,
      "step": 139
    },
    {
      "epoch": 1.2280701754385965,
      "grad_norm": 1.3939160108566284,
      "learning_rate": 4.942346231942955e-06,
      "loss": 0.1307,
      "step": 140
    },
    {
      "epoch": 1.236842105263158,
      "grad_norm": 11.276732444763184,
      "learning_rate": 4.939439350016214e-06,
      "loss": 0.1397,
      "step": 141
    },
    {
      "epoch": 1.2456140350877192,
      "grad_norm": 8.260516166687012,
      "learning_rate": 4.9364618748634794e-06,
      "loss": 0.1426,
      "step": 142
    },
    {
      "epoch": 1.2543859649122808,
      "grad_norm": 2.09720516204834,
      "learning_rate": 4.933413892647819e-06,
      "loss": 0.1323,
      "step": 143
    },
    {
      "epoch": 1.263157894736842,
      "grad_norm": 1.802125334739685,
      "learning_rate": 4.9302954915726535e-06,
      "loss": 0.1304,
      "step": 144
    },
    {
      "epoch": 1.2719298245614035,
      "grad_norm": 1.7151471376419067,
      "learning_rate": 4.927106761879207e-06,
      "loss": 0.1264,
      "step": 145
    },
    {
      "epoch": 1.280701754385965,
      "grad_norm": 1.6970336437225342,
      "learning_rate": 4.923847795843894e-06,
      "loss": 0.1227,
      "step": 146
    },
    {
      "epoch": 1.2894736842105263,
      "grad_norm": 16.60441017150879,
      "learning_rate": 4.920518687775647e-06,
      "loss": 0.1606,
      "step": 147
    },
    {
      "epoch": 1.2982456140350878,
      "grad_norm": 6.470354080200195,
      "learning_rate": 4.917119534013194e-06,
      "loss": 0.1447,
      "step": 148
    },
    {
      "epoch": 1.3070175438596492,
      "grad_norm": 1.4908231496810913,
      "learning_rate": 4.913650432922264e-06,
      "loss": 0.1343,
      "step": 149
    },
    {
      "epoch": 1.3157894736842106,
      "grad_norm": 3.19964861869812,
      "learning_rate": 4.91011148489274e-06,
      "loss": 0.1354,
      "step": 150
    },
    {
      "epoch": 1.3245614035087718,
      "grad_norm": 2.6052839756011963,
      "learning_rate": 4.906502792335761e-06,
      "loss": 0.1342,
      "step": 151
    },
    {
      "epoch": 1.3333333333333333,
      "grad_norm": 2.0719165802001953,
      "learning_rate": 4.9028244596807525e-06,
      "loss": 0.1359,
      "step": 152
    },
    {
      "epoch": 1.3421052631578947,
      "grad_norm": 0.8086919784545898,
      "learning_rate": 4.899076593372405e-06,
      "loss": 0.1279,
      "step": 153
    },
    {
      "epoch": 1.3508771929824561,
      "grad_norm": 1.0056848526000977,
      "learning_rate": 4.8952593018675955e-06,
      "loss": 0.1162,
      "step": 154
    },
    {
      "epoch": 1.3596491228070176,
      "grad_norm": 5.72553014755249,
      "learning_rate": 4.891372695632249e-06,
      "loss": 0.1315,
      "step": 155
    },
    {
      "epoch": 1.368421052631579,
      "grad_norm": 1.522894024848938,
      "learning_rate": 4.887416887138139e-06,
      "loss": 0.1266,
      "step": 156
    },
    {
      "epoch": 1.3771929824561404,
      "grad_norm": 2.019472122192383,
      "learning_rate": 4.883391990859635e-06,
      "loss": 0.1262,
      "step": 157
    },
    {
      "epoch": 1.3859649122807016,
      "grad_norm": 1.8594422340393066,
      "learning_rate": 4.879298123270391e-06,
      "loss": 0.125,
      "step": 158
    },
    {
      "epoch": 1.3947368421052633,
      "grad_norm": 1.365377426147461,
      "learning_rate": 4.8751354028399725e-06,
      "loss": 0.1218,
      "step": 159
    },
    {
      "epoch": 1.4035087719298245,
      "grad_norm": 3.553309917449951,
      "learning_rate": 4.870903950030429e-06,
      "loss": 0.1272,
      "step": 160
    },
    {
      "epoch": 1.412280701754386,
      "grad_norm": 2.1770920753479004,
      "learning_rate": 4.866603887292809e-06,
      "loss": 0.1213,
      "step": 161
    },
    {
      "epoch": 1.4210526315789473,
      "grad_norm": 1.6058955192565918,
      "learning_rate": 4.862235339063613e-06,
      "loss": 0.1173,
      "step": 162
    },
    {
      "epoch": 1.4298245614035088,
      "grad_norm": 1.3208314180374146,
      "learning_rate": 4.857798431761199e-06,
      "loss": 0.1183,
      "step": 163
    },
    {
      "epoch": 1.4385964912280702,
      "grad_norm": 1.282729983329773,
      "learning_rate": 4.853293293782118e-06,
      "loss": 0.1209,
      "step": 164
    },
    {
      "epoch": 1.4473684210526316,
      "grad_norm": 1.3838152885437012,
      "learning_rate": 4.848720055497401e-06,
      "loss": 0.1198,
      "step": 165
    },
    {
      "epoch": 1.456140350877193,
      "grad_norm": 1.2930737733840942,
      "learning_rate": 4.844078849248785e-06,
      "loss": 0.1268,
      "step": 166
    },
    {
      "epoch": 1.4649122807017543,
      "grad_norm": 1.7022266387939453,
      "learning_rate": 4.839369809344888e-06,
      "loss": 0.1198,
      "step": 167
    },
    {
      "epoch": 1.4736842105263157,
      "grad_norm": 1.0927815437316895,
      "learning_rate": 4.834593072057313e-06,
      "loss": 0.1132,
      "step": 168
    },
    {
      "epoch": 1.4824561403508771,
      "grad_norm": 0.9326333999633789,
      "learning_rate": 4.829748775616716e-06,
      "loss": 0.1193,
      "step": 169
    },
    {
      "epoch": 1.4912280701754386,
      "grad_norm": 1.3564742803573608,
      "learning_rate": 4.8248370602087954e-06,
      "loss": 0.118,
      "step": 170
    },
    {
      "epoch": 1.5,
      "grad_norm": 1.19778573513031,
      "learning_rate": 4.819858067970243e-06,
      "loss": 0.1122,
      "step": 171
    },
    {
      "epoch": 1.5087719298245614,
      "grad_norm": 2.8438351154327393,
      "learning_rate": 4.814811942984625e-06,
      "loss": 0.1217,
      "step": 172
    },
    {
      "epoch": 1.5175438596491229,
      "grad_norm": 1.0701063871383667,
      "learning_rate": 4.809698831278217e-06,
      "loss": 0.1114,
      "step": 173
    },
    {
      "epoch": 1.526315789473684,
      "grad_norm": 0.9053553938865662,
      "learning_rate": 4.804518880815776e-06,
      "loss": 0.1178,
      "step": 174
    },
    {
      "epoch": 1.5350877192982457,
      "grad_norm": 0.42274603247642517,
      "learning_rate": 4.799272241496259e-06,
      "loss": 0.1091,
      "step": 175
    },
    {
      "epoch": 1.543859649122807,
      "grad_norm": 0.8576470017433167,
      "learning_rate": 4.793959065148484e-06,
      "loss": 0.1134,
      "step": 176
    },
    {
      "epoch": 1.5526315789473686,
      "grad_norm": 0.5910662412643433,
      "learning_rate": 4.78857950552674e-06,
      "loss": 0.1148,
      "step": 177
    },
    {
      "epoch": 1.5614035087719298,
      "grad_norm": 0.8761632442474365,
      "learning_rate": 4.783133718306331e-06,
      "loss": 0.1125,
      "step": 178
    },
    {
      "epoch": 1.5701754385964912,
      "grad_norm": 1.9190795421600342,
      "learning_rate": 4.777621861079079e-06,
      "loss": 0.1148,
      "step": 179
    },
    {
      "epoch": 1.5789473684210527,
      "grad_norm": 0.6199957728385925,
      "learning_rate": 4.772044093348757e-06,
      "loss": 0.1097,
      "step": 180
    },
    {
      "epoch": 1.587719298245614,
      "grad_norm": 1.562089443206787,
      "learning_rate": 4.766400576526479e-06,
      "loss": 0.1097,
      "step": 181
    },
    {
      "epoch": 1.5964912280701755,
      "grad_norm": 1.4957091808319092,
      "learning_rate": 4.760691473926021e-06,
      "loss": 0.1216,
      "step": 182
    },
    {
      "epoch": 1.6052631578947367,
      "grad_norm": 0.9863570332527161,
      "learning_rate": 4.754916950759105e-06,
      "loss": 0.1122,
      "step": 183
    },
    {
      "epoch": 1.6140350877192984,
      "grad_norm": 0.5803346633911133,
      "learning_rate": 4.749077174130609e-06,
      "loss": 0.1103,
      "step": 184
    },
    {
      "epoch": 1.6228070175438596,
      "grad_norm": 1.8789891004562378,
      "learning_rate": 4.743172313033738e-06,
      "loss": 0.1191,
      "step": 185
    },
    {
      "epoch": 1.631578947368421,
      "grad_norm": 0.8731380105018616,
      "learning_rate": 4.7372025383451285e-06,
      "loss": 0.1154,
      "step": 186
    },
    {
      "epoch": 1.6403508771929824,
      "grad_norm": 1.3535627126693726,
      "learning_rate": 4.7311680228199075e-06,
      "loss": 0.1123,
      "step": 187
    },
    {
      "epoch": 1.6491228070175439,
      "grad_norm": 0.7211089134216309,
      "learning_rate": 4.725068941086693e-06,
      "loss": 0.1134,
      "step": 188
    },
    {
      "epoch": 1.6578947368421053,
      "grad_norm": 1.4752328395843506,
      "learning_rate": 4.718905469642534e-06,
      "loss": 0.1185,
      "step": 189
    },
    {
      "epoch": 1.6666666666666665,
      "grad_norm": 0.9822680354118347,
      "learning_rate": 4.712677786847814e-06,
      "loss": 0.1146,
      "step": 190
    },
    {
      "epoch": 1.6754385964912282,
      "grad_norm": 1.1308330297470093,
      "learning_rate": 4.706386072921083e-06,
      "loss": 0.1061,
      "step": 191
    },
    {
      "epoch": 1.6842105263157894,
      "grad_norm": 5.331939697265625,
      "learning_rate": 4.70003050993384e-06,
      "loss": 0.1153,
      "step": 192
    },
    {
      "epoch": 1.692982456140351,
      "grad_norm": 0.6911673545837402,
      "learning_rate": 4.6936112818052674e-06,
      "loss": 0.1098,
      "step": 193
    },
    {
      "epoch": 1.7017543859649122,
      "grad_norm": 0.5160980224609375,
      "learning_rate": 4.687128574296912e-06,
      "loss": 0.1073,
      "step": 194
    },
    {
      "epoch": 1.7105263157894737,
      "grad_norm": 1.5724798440933228,
      "learning_rate": 4.680582575007303e-06,
      "loss": 0.121,
      "step": 195
    },
    {
      "epoch": 1.719298245614035,
      "grad_norm": 1.3960011005401611,
      "learning_rate": 4.6739734733665275e-06,
      "loss": 0.1145,
      "step": 196
    },
    {
      "epoch": 1.7280701754385965,
      "grad_norm": 1.4949183464050293,
      "learning_rate": 4.6673014606307465e-06,
      "loss": 0.1166,
      "step": 197
    },
    {
      "epoch": 1.736842105263158,
      "grad_norm": 1.6873422861099243,
      "learning_rate": 4.660566729876661e-06,
      "loss": 0.1115,
      "step": 198
    },
    {
      "epoch": 1.7456140350877192,
      "grad_norm": 1.3443641662597656,
      "learning_rate": 4.653769475995926e-06,
      "loss": 0.1119,
      "step": 199
    },
    {
      "epoch": 1.7543859649122808,
      "grad_norm": 0.807525098323822,
      "learning_rate": 4.646909895689508e-06,
      "loss": 0.1059,
      "step": 200
    },
    {
      "epoch": 1.763157894736842,
      "grad_norm": 1.589316964149475,
      "learning_rate": 4.639988187461995e-06,
      "loss": 0.1151,
      "step": 201
    },
    {
      "epoch": 1.7719298245614035,
      "grad_norm": 2.474756956100464,
      "learning_rate": 4.633004551615851e-06,
      "loss": 0.116,
      "step": 202
    },
    {
      "epoch": 1.780701754385965,
      "grad_norm": 0.6210195422172546,
      "learning_rate": 4.62595919024562e-06,
      "loss": 0.1097,
      "step": 203
    },
    {
      "epoch": 1.7894736842105263,
      "grad_norm": 0.7217905521392822,
      "learning_rate": 4.618852307232078e-06,
      "loss": 0.1117,
      "step": 204
    },
    {
      "epoch": 1.7982456140350878,
      "grad_norm": 1.551251769065857,
      "learning_rate": 4.611684108236334e-06,
      "loss": 0.113,
      "step": 205
    },
    {
      "epoch": 1.807017543859649,
      "grad_norm": 0.6619828939437866,
      "learning_rate": 4.604454800693874e-06,
      "loss": 0.113,
      "step": 206
    },
    {
      "epoch": 1.8157894736842106,
      "grad_norm": 0.9461805820465088,
      "learning_rate": 4.597164593808564e-06,
      "loss": 0.1093,
      "step": 207
    },
    {
      "epoch": 1.8245614035087718,
      "grad_norm": 1.2926547527313232,
      "learning_rate": 4.589813698546592e-06,
      "loss": 0.1128,
      "step": 208
    },
    {
      "epoch": 1.8333333333333335,
      "grad_norm": 0.8754212856292725,
      "learning_rate": 4.582402327630368e-06,
      "loss": 0.1104,
      "step": 209
    },
    {
      "epoch": 1.8421052631578947,
      "grad_norm": 0.846051812171936,
      "learning_rate": 4.574930695532357e-06,
      "loss": 0.1105,
      "step": 210
    },
    {
      "epoch": 1.8508771929824561,
      "grad_norm": 1.3332515954971313,
      "learning_rate": 4.567399018468889e-06,
      "loss": 0.1101,
      "step": 211
    },
    {
      "epoch": 1.8596491228070176,
      "grad_norm": 0.8729192614555359,
      "learning_rate": 4.5598075143938855e-06,
      "loss": 0.1081,
      "step": 212
    },
    {
      "epoch": 1.868421052631579,
      "grad_norm": 0.8618345260620117,
      "learning_rate": 4.552156402992567e-06,
      "loss": 0.1059,
      "step": 213
    },
    {
      "epoch": 1.8771929824561404,
      "grad_norm": 1.2135930061340332,
      "learning_rate": 4.544445905675082e-06,
      "loss": 0.1105,
      "step": 214
    },
    {
      "epoch": 1.8859649122807016,
      "grad_norm": 0.8405666351318359,
      "learning_rate": 4.536676245570111e-06,
      "loss": 0.1118,
      "step": 215
    },
    {
      "epoch": 1.8947368421052633,
      "grad_norm": 0.42860639095306396,
      "learning_rate": 4.528847647518403e-06,
      "loss": 0.1093,
      "step": 216
    },
    {
      "epoch": 1.9035087719298245,
      "grad_norm": 1.1538206338882446,
      "learning_rate": 4.520960338066271e-06,
      "loss": 0.1088,
      "step": 217
    },
    {
      "epoch": 1.912280701754386,
      "grad_norm": 0.5870749354362488,
      "learning_rate": 4.513014545459038e-06,
      "loss": 0.1061,
      "step": 218
    },
    {
      "epoch": 1.9210526315789473,
      "grad_norm": 0.7279748916625977,
      "learning_rate": 4.505010499634427e-06,
      "loss": 0.1032,
      "step": 219
    },
    {
      "epoch": 1.9298245614035088,
      "grad_norm": 0.6331414580345154,
      "learning_rate": 4.4969484322159125e-06,
      "loss": 0.1109,
      "step": 220
    },
    {
      "epoch": 1.9385964912280702,
      "grad_norm": 0.9024543166160583,
      "learning_rate": 4.488828576506014e-06,
      "loss": 0.1094,
      "step": 221
    },
    {
      "epoch": 1.9473684210526314,
      "grad_norm": 3.540376901626587,
      "learning_rate": 4.480651167479545e-06,
      "loss": 0.1154,
      "step": 222
    },
    {
      "epoch": 1.956140350877193,
      "grad_norm": 0.9506739377975464,
      "learning_rate": 4.472416441776817e-06,
      "loss": 0.108,
      "step": 223
    },
    {
      "epoch": 1.9649122807017543,
      "grad_norm": 0.6585081815719604,
      "learning_rate": 4.464124637696786e-06,
      "loss": 0.1033,
      "step": 224
    },
    {
      "epoch": 1.973684210526316,
      "grad_norm": 1.143038034439087,
      "learning_rate": 4.455775995190161e-06,
      "loss": 0.1092,
      "step": 225
    },
    {
      "epoch": 1.9824561403508771,
      "grad_norm": 1.148261547088623,
      "learning_rate": 4.4473707558524555e-06,
      "loss": 0.1076,
      "step": 226
    },
    {
      "epoch": 1.9912280701754386,
      "grad_norm": 0.7375811338424683,
      "learning_rate": 4.438909162917003e-06,
      "loss": 0.108,
      "step": 227
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.5254591703414917,
      "learning_rate": 4.430391461247911e-06,
      "loss": 0.1079,
      "step": 228
    },
    {
      "epoch": 2.008771929824561,
      "grad_norm": 1.0198495388031006,
      "learning_rate": 4.42181789733298e-06,
      "loss": 0.1083,
      "step": 229
    },
    {
      "epoch": 2.017543859649123,
      "grad_norm": 0.9234157800674438,
      "learning_rate": 4.413188719276569e-06,
      "loss": 0.1084,
      "step": 230
    },
    {
      "epoch": 2.026315789473684,
      "grad_norm": 0.5215068459510803,
      "learning_rate": 4.404504176792414e-06,
      "loss": 0.1067,
      "step": 231
    },
    {
      "epoch": 2.0350877192982457,
      "grad_norm": 0.9296736121177673,
      "learning_rate": 4.3957645211964065e-06,
      "loss": 0.1066,
      "step": 232
    },
    {
      "epoch": 2.043859649122807,
      "grad_norm": 0.8660671710968018,
      "learning_rate": 4.386970005399314e-06,
      "loss": 0.108,
      "step": 233
    },
    {
      "epoch": 2.0526315789473686,
      "grad_norm": 0.6014883518218994,
      "learning_rate": 4.378120883899467e-06,
      "loss": 0.1068,
      "step": 234
    },
    {
      "epoch": 2.06140350877193,
      "grad_norm": 0.6370371580123901,
      "learning_rate": 4.369217412775393e-06,
      "loss": 0.1076,
      "step": 235
    },
    {
      "epoch": 2.0701754385964914,
      "grad_norm": 0.9806828498840332,
      "learning_rate": 4.360259849678402e-06,
      "loss": 0.1071,
      "step": 236
    },
    {
      "epoch": 2.0789473684210527,
      "grad_norm": 0.6093440651893616,
      "learning_rate": 4.351248453825137e-06,
      "loss": 0.1038,
      "step": 237
    },
    {
      "epoch": 2.087719298245614,
      "grad_norm": 1.3494842052459717,
      "learning_rate": 4.3421834859900695e-06,
      "loss": 0.1105,
      "step": 238
    },
    {
      "epoch": 2.0964912280701755,
      "grad_norm": 0.7621576189994812,
      "learning_rate": 4.333065208497949e-06,
      "loss": 0.1048,
      "step": 239
    },
    {
      "epoch": 2.1052631578947367,
      "grad_norm": 0.5918282866477966,
      "learning_rate": 4.3238938852162195e-06,
      "loss": 0.1086,
      "step": 240
    },
    {
      "epoch": 2.1140350877192984,
      "grad_norm": 0.7048676609992981,
      "learning_rate": 4.314669781547379e-06,
      "loss": 0.1061,
      "step": 241
    },
    {
      "epoch": 2.1228070175438596,
      "grad_norm": 1.0750821828842163,
      "learning_rate": 4.305393164421301e-06,
      "loss": 0.1082,
      "step": 242
    },
    {
      "epoch": 2.1315789473684212,
      "grad_norm": 0.6171414852142334,
      "learning_rate": 4.296064302287507e-06,
      "loss": 0.1039,
      "step": 243
    },
    {
      "epoch": 2.1403508771929824,
      "grad_norm": 0.8080905079841614,
      "learning_rate": 4.286683465107403e-06,
      "loss": 0.1069,
      "step": 244
    },
    {
      "epoch": 2.1491228070175437,
      "grad_norm": 0.5281466245651245,
      "learning_rate": 4.277250924346461e-06,
      "loss": 0.1069,
      "step": 245
    },
    {
      "epoch": 2.1578947368421053,
      "grad_norm": 0.8070254325866699,
      "learning_rate": 4.267766952966369e-06,
      "loss": 0.1061,
      "step": 246
    },
    {
      "epoch": 2.1666666666666665,
      "grad_norm": 0.8560577630996704,
      "learning_rate": 4.25823182541713e-06,
      "loss": 0.1116,
      "step": 247
    },
    {
      "epoch": 2.175438596491228,
      "grad_norm": 0.7772330045700073,
      "learning_rate": 4.2486458176291176e-06,
      "loss": 0.1092,
      "step": 248
    },
    {
      "epoch": 2.1842105263157894,
      "grad_norm": 0.814601719379425,
      "learning_rate": 4.239009207005096e-06,
      "loss": 0.1093,
      "step": 249
    },
    {
      "epoch": 2.192982456140351,
      "grad_norm": 0.957789957523346,
      "learning_rate": 4.2293222724121855e-06,
      "loss": 0.1075,
      "step": 250
    },
    {
      "epoch": 2.2017543859649122,
      "grad_norm": 0.500062108039856,
      "learning_rate": 4.219585294173799e-06,
      "loss": 0.1048,
      "step": 251
    },
    {
      "epoch": 2.2105263157894735,
      "grad_norm": 0.3866419792175293,
      "learning_rate": 4.209798554061527e-06,
      "loss": 0.1074,
      "step": 252
    },
    {
      "epoch": 2.219298245614035,
      "grad_norm": 1.1853291988372803,
      "learning_rate": 4.199962335286985e-06,
      "loss": 0.1076,
      "step": 253
    },
    {
      "epoch": 2.2280701754385963,
      "grad_norm": 0.36602887511253357,
      "learning_rate": 4.1900769224936125e-06,
      "loss": 0.108,
      "step": 254
    },
    {
      "epoch": 2.236842105263158,
      "grad_norm": 0.2530711889266968,
      "learning_rate": 4.180142601748447e-06,
      "loss": 0.1041,
      "step": 255
    },
    {
      "epoch": 2.245614035087719,
      "grad_norm": 1.3067054748535156,
      "learning_rate": 4.170159660533834e-06,
      "loss": 0.1087,
      "step": 256
    },
    {
      "epoch": 2.254385964912281,
      "grad_norm": 0.3442043960094452,
      "learning_rate": 4.160128387739114e-06,
      "loss": 0.1099,
      "step": 257
    },
    {
      "epoch": 2.263157894736842,
      "grad_norm": 1.174796462059021,
      "learning_rate": 4.150049073652262e-06,
      "loss": 0.1063,
      "step": 258
    },
    {
      "epoch": 2.2719298245614037,
      "grad_norm": 0.5719411969184875,
      "learning_rate": 4.1399220099514845e-06,
      "loss": 0.1043,
      "step": 259
    },
    {
      "epoch": 2.280701754385965,
      "grad_norm": 0.7268956303596497,
      "learning_rate": 4.129747489696781e-06,
      "loss": 0.1038,
      "step": 260
    },
    {
      "epoch": 2.2894736842105265,
      "grad_norm": 0.7028316259384155,
      "learning_rate": 4.119525807321467e-06,
      "loss": 0.1052,
      "step": 261
    },
    {
      "epoch": 2.2982456140350878,
      "grad_norm": 1.015335202217102,
      "learning_rate": 4.109257258623644e-06,
      "loss": 0.1116,
      "step": 262
    },
    {
      "epoch": 2.307017543859649,
      "grad_norm": 0.7141755819320679,
      "learning_rate": 4.098942140757646e-06,
      "loss": 0.108,
      "step": 263
    },
    {
      "epoch": 2.3157894736842106,
      "grad_norm": 0.7656403183937073,
      "learning_rate": 4.0885807522254435e-06,
      "loss": 0.1043,
      "step": 264
    },
    {
      "epoch": 2.324561403508772,
      "grad_norm": 0.43293774127960205,
      "learning_rate": 4.078173392867998e-06,
      "loss": 0.1048,
      "step": 265
    },
    {
      "epoch": 2.3333333333333335,
      "grad_norm": 0.6755763292312622,
      "learning_rate": 4.0677203638565895e-06,
      "loss": 0.1064,
      "step": 266
    },
    {
      "epoch": 2.3421052631578947,
      "grad_norm": 0.9648827314376831,
      "learning_rate": 4.0572219676841e-06,
      "loss": 0.1088,
      "step": 267
    },
    {
      "epoch": 2.3508771929824563,
      "grad_norm": 0.32724836468696594,
      "learning_rate": 4.046678508156259e-06,
      "loss": 0.1077,
      "step": 268
    },
    {
      "epoch": 2.3596491228070176,
      "grad_norm": 0.4696657061576843,
      "learning_rate": 4.036090290382855e-06,
      "loss": 0.1067,
      "step": 269
    },
    {
      "epoch": 2.3684210526315788,
      "grad_norm": 0.33901306986808777,
      "learning_rate": 4.025457620768901e-06,
      "loss": 0.105,
      "step": 270
    },
    {
      "epoch": 2.3771929824561404,
      "grad_norm": 0.5703794360160828,
      "learning_rate": 4.014780807005775e-06,
      "loss": 0.1033,
      "step": 271
    },
    {
      "epoch": 2.3859649122807016,
      "grad_norm": 0.9639355540275574,
      "learning_rate": 4.004060158062306e-06,
      "loss": 0.1041,
      "step": 272
    },
    {
      "epoch": 2.3947368421052633,
      "grad_norm": 0.8851558566093445,
      "learning_rate": 3.993295984175845e-06,
      "loss": 0.1064,
      "step": 273
    },
    {
      "epoch": 2.4035087719298245,
      "grad_norm": 0.5200062990188599,
      "learning_rate": 3.982488596843276e-06,
      "loss": 0.1056,
      "step": 274
    },
    {
      "epoch": 2.412280701754386,
      "grad_norm": 1.160823106765747,
      "learning_rate": 3.971638308812007e-06,
      "loss": 0.1069,
      "step": 275
    },
    {
      "epoch": 2.4210526315789473,
      "grad_norm": 1.0191210508346558,
      "learning_rate": 3.9607454340709215e-06,
      "loss": 0.1042,
      "step": 276
    },
    {
      "epoch": 2.4298245614035086,
      "grad_norm": 0.37181487679481506,
      "learning_rate": 3.949810287841289e-06,
      "loss": 0.1062,
      "step": 277
    },
    {
      "epoch": 2.43859649122807,
      "grad_norm": 0.9328593611717224,
      "learning_rate": 3.9388331865676436e-06,
      "loss": 0.1086,
      "step": 278
    },
    {
      "epoch": 2.4473684210526314,
      "grad_norm": 0.8024734258651733,
      "learning_rate": 3.927814447908625e-06,
      "loss": 0.1051,
      "step": 279
    },
    {
      "epoch": 2.456140350877193,
      "grad_norm": 0.9746696352958679,
      "learning_rate": 3.916754390727795e-06,
      "loss": 0.1041,
      "step": 280
    },
    {
      "epoch": 2.4649122807017543,
      "grad_norm": 0.5457844138145447,
      "learning_rate": 3.905653335084394e-06,
      "loss": 0.1052,
      "step": 281
    },
    {
      "epoch": 2.473684210526316,
      "grad_norm": 1.0736924409866333,
      "learning_rate": 3.8945116022240945e-06,
      "loss": 0.1075,
      "step": 282
    },
    {
      "epoch": 2.482456140350877,
      "grad_norm": 0.6335628032684326,
      "learning_rate": 3.8833295145696964e-06,
      "loss": 0.1036,
      "step": 283
    },
    {
      "epoch": 2.4912280701754383,
      "grad_norm": 0.6909618377685547,
      "learning_rate": 3.872107395711799e-06,
      "loss": 0.1089,
      "step": 284
    },
    {
      "epoch": 2.5,
      "grad_norm": 2.1871702671051025,
      "learning_rate": 3.860845570399435e-06,
      "loss": 0.1066,
      "step": 285
    },
    {
      "epoch": 2.5087719298245617,
      "grad_norm": 0.5831722617149353,
      "learning_rate": 3.849544364530678e-06,
      "loss": 0.1055,
      "step": 286
    },
    {
      "epoch": 2.517543859649123,
      "grad_norm": 0.5302637815475464,
      "learning_rate": 3.838204105143204e-06,
      "loss": 0.1057,
      "step": 287
    },
    {
      "epoch": 2.526315789473684,
      "grad_norm": 0.6348035931587219,
      "learning_rate": 3.8268251204048335e-06,
      "loss": 0.1089,
      "step": 288
    },
    {
      "epoch": 2.5350877192982457,
      "grad_norm": 2.1932008266448975,
      "learning_rate": 3.815407739604033e-06,
      "loss": 0.1043,
      "step": 289
    },
    {
      "epoch": 2.543859649122807,
      "grad_norm": 0.4388940930366516,
      "learning_rate": 3.803952293140385e-06,
      "loss": 0.1055,
      "step": 290
    },
    {
      "epoch": 2.5526315789473686,
      "grad_norm": 0.6853339076042175,
      "learning_rate": 3.7924591125150265e-06,
      "loss": 0.1036,
      "step": 291
    },
    {
      "epoch": 2.56140350877193,
      "grad_norm": 0.34744876623153687,
      "learning_rate": 3.78092853032106e-06,
      "loss": 0.1025,
      "step": 292
    },
    {
      "epoch": 2.5701754385964914,
      "grad_norm": 0.9523847699165344,
      "learning_rate": 3.769360880233922e-06,
      "loss": 0.1067,
      "step": 293
    },
    {
      "epoch": 2.5789473684210527,
      "grad_norm": 1.303745985031128,
      "learning_rate": 3.7577564970017338e-06,
      "loss": 0.1082,
      "step": 294
    },
    {
      "epoch": 2.587719298245614,
      "grad_norm": 0.9468981623649597,
      "learning_rate": 3.7461157164356103e-06,
      "loss": 0.1055,
      "step": 295
    },
    {
      "epoch": 2.5964912280701755,
      "grad_norm": 0.7204175591468811,
      "learning_rate": 3.7344388753999434e-06,
      "loss": 0.1055,
      "step": 296
    },
    {
      "epoch": 2.6052631578947367,
      "grad_norm": 0.5110165476799011,
      "learning_rate": 3.7227263118026537e-06,
      "loss": 0.1092,
      "step": 297
    },
    {
      "epoch": 2.6140350877192984,
      "grad_norm": 0.6483246088027954,
      "learning_rate": 3.7109783645854116e-06,
      "loss": 0.1078,
      "step": 298
    },
    {
      "epoch": 2.6228070175438596,
      "grad_norm": 0.5058422684669495,
      "learning_rate": 3.699195373713831e-06,
      "loss": 0.1073,
      "step": 299
    },
    {
      "epoch": 2.6315789473684212,
      "grad_norm": 0.4123518764972687,
      "learning_rate": 3.6873776801676265e-06,
      "loss": 0.1053,
      "step": 300
    },
    {
      "epoch": 2.6403508771929824,
      "grad_norm": 1.0864709615707397,
      "learning_rate": 3.675525625930751e-06,
      "loss": 0.1048,
      "step": 301
    },
    {
      "epoch": 2.6491228070175437,
      "grad_norm": 1.0264904499053955,
      "learning_rate": 3.6636395539814975e-06,
      "loss": 0.1059,
      "step": 302
    },
    {
      "epoch": 2.6578947368421053,
      "grad_norm": 0.7724822163581848,
      "learning_rate": 3.651719808282573e-06,
      "loss": 0.1063,
      "step": 303
    },
    {
      "epoch": 2.6666666666666665,
      "grad_norm": 0.7474755644798279,
      "learning_rate": 3.6397667337711475e-06,
      "loss": 0.1034,
      "step": 304
    },
    {
      "epoch": 2.675438596491228,
      "grad_norm": 0.5628909468650818,
      "learning_rate": 3.6277806763488666e-06,
      "loss": 0.1026,
      "step": 305
    },
    {
      "epoch": 2.6842105263157894,
      "grad_norm": 0.9070547819137573,
      "learning_rate": 3.6157619828718477e-06,
      "loss": 0.1031,
      "step": 306
    },
    {
      "epoch": 2.692982456140351,
      "grad_norm": 0.6968091130256653,
      "learning_rate": 3.603711001140641e-06,
      "loss": 0.1068,
      "step": 307
    },
    {
      "epoch": 2.7017543859649122,
      "grad_norm": 0.3764977753162384,
      "learning_rate": 3.5916280798901604e-06,
      "loss": 0.1038,
      "step": 308
    },
    {
      "epoch": 2.7105263157894735,
      "grad_norm": 5.012625694274902,
      "learning_rate": 3.5795135687795984e-06,
      "loss": 0.1129,
      "step": 309
    },
    {
      "epoch": 2.719298245614035,
      "grad_norm": 0.6745572686195374,
      "learning_rate": 3.567367818382303e-06,
      "loss": 0.1071,
      "step": 310
    },
    {
      "epoch": 2.7280701754385968,
      "grad_norm": 1.0659606456756592,
      "learning_rate": 3.555191180175634e-06,
      "loss": 0.1067,
      "step": 311
    },
    {
      "epoch": 2.736842105263158,
      "grad_norm": 1.7312604188919067,
      "learning_rate": 3.5429840065307924e-06,
      "loss": 0.1101,
      "step": 312
    },
    {
      "epoch": 2.745614035087719,
      "grad_norm": 1.100364327430725,
      "learning_rate": 3.5307466507026223e-06,
      "loss": 0.1098,
      "step": 313
    },
    {
      "epoch": 2.754385964912281,
      "grad_norm": 1.0390428304672241,
      "learning_rate": 3.5184794668193893e-06,
      "loss": 0.1094,
      "step": 314
    },
    {
      "epoch": 2.763157894736842,
      "grad_norm": 0.3369971811771393,
      "learning_rate": 3.5061828098725327e-06,
      "loss": 0.1053,
      "step": 315
    },
    {
      "epoch": 2.7719298245614032,
      "grad_norm": 0.6130257248878479,
      "learning_rate": 3.4938570357063906e-06,
      "loss": 0.106,
      "step": 316
    },
    {
      "epoch": 2.780701754385965,
      "grad_norm": 0.6387595534324646,
      "learning_rate": 3.481502501007904e-06,
      "loss": 0.1044,
      "step": 317
    },
    {
      "epoch": 2.7894736842105265,
      "grad_norm": 1.0731587409973145,
      "learning_rate": 3.469119563296296e-06,
      "loss": 0.1097,
      "step": 318
    },
    {
      "epoch": 2.7982456140350878,
      "grad_norm": 0.8096229434013367,
      "learning_rate": 3.4567085809127247e-06,
      "loss": 0.1076,
      "step": 319
    },
    {
      "epoch": 2.807017543859649,
      "grad_norm": 0.5034844279289246,
      "learning_rate": 3.444269913009912e-06,
      "loss": 0.1071,
      "step": 320
    },
    {
      "epoch": 2.8157894736842106,
      "grad_norm": 0.675139307975769,
      "learning_rate": 3.4318039195417536e-06,
      "loss": 0.1039,
      "step": 321
    },
    {
      "epoch": 2.824561403508772,
      "grad_norm": 0.7330355644226074,
      "learning_rate": 3.4193109612528972e-06,
      "loss": 0.1044,
      "step": 322
    },
    {
      "epoch": 2.8333333333333335,
      "grad_norm": 0.6558271646499634,
      "learning_rate": 3.4067913996683115e-06,
      "loss": 0.1051,
      "step": 323
    },
    {
      "epoch": 2.8421052631578947,
      "grad_norm": 0.8411844372749329,
      "learning_rate": 3.3942455970828146e-06,
      "loss": 0.1063,
      "step": 324
    },
    {
      "epoch": 2.8508771929824563,
      "grad_norm": 0.4817325174808502,
      "learning_rate": 3.3816739165505964e-06,
      "loss": 0.105,
      "step": 325
    },
    {
      "epoch": 2.8596491228070176,
      "grad_norm": 0.424554705619812,
      "learning_rate": 3.3690767218747104e-06,
      "loss": 0.1037,
      "step": 326
    },
    {
      "epoch": 2.8684210526315788,
      "grad_norm": 1.0054417848587036,
      "learning_rate": 3.3564543775965475e-06,
      "loss": 0.1058,
      "step": 327
    },
    {
      "epoch": 2.8771929824561404,
      "grad_norm": 0.8984584808349609,
      "learning_rate": 3.3438072489852837e-06,
      "loss": 0.1079,
      "step": 328
    },
    {
      "epoch": 2.8859649122807016,
      "grad_norm": 0.6779558062553406,
      "learning_rate": 3.331135702027311e-06,
      "loss": 0.1046,
      "step": 329
    },
    {
      "epoch": 2.8947368421052633,
      "grad_norm": 0.6931657195091248,
      "learning_rate": 3.318440103415649e-06,
      "loss": 0.1106,
      "step": 330
    },
    {
      "epoch": 2.9035087719298245,
      "grad_norm": 0.705264151096344,
      "learning_rate": 3.305720820539329e-06,
      "loss": 0.104,
      "step": 331
    },
    {
      "epoch": 2.912280701754386,
      "grad_norm": 0.7799407839775085,
      "learning_rate": 3.2929782214727657e-06,
      "loss": 0.1019,
      "step": 332
    },
    {
      "epoch": 2.9210526315789473,
      "grad_norm": 0.7583760619163513,
      "learning_rate": 3.2802126749651042e-06,
      "loss": 0.1049,
      "step": 333
    },
    {
      "epoch": 2.9298245614035086,
      "grad_norm": 0.6145837306976318,
      "learning_rate": 3.2674245504295505e-06,
      "loss": 0.104,
      "step": 334
    },
    {
      "epoch": 2.93859649122807,
      "grad_norm": 0.5170779228210449,
      "learning_rate": 3.254614217932679e-06,
      "loss": 0.1024,
      "step": 335
    },
    {
      "epoch": 2.9473684210526314,
      "grad_norm": 0.6850940585136414,
      "learning_rate": 3.241782048183726e-06,
      "loss": 0.1047,
      "step": 336
    },
    {
      "epoch": 2.956140350877193,
      "grad_norm": 0.7307694554328918,
      "learning_rate": 3.2289284125238597e-06,
      "loss": 0.1032,
      "step": 337
    },
    {
      "epoch": 2.9649122807017543,
      "grad_norm": 0.3386179208755493,
      "learning_rate": 3.216053682915436e-06,
      "loss": 0.1037,
      "step": 338
    },
    {
      "epoch": 2.973684210526316,
      "grad_norm": 0.7565059065818787,
      "learning_rate": 3.203158231931234e-06,
      "loss": 0.1048,
      "step": 339
    },
    {
      "epoch": 2.982456140350877,
      "grad_norm": 0.7902039289474487,
      "learning_rate": 3.190242432743673e-06,
      "loss": 0.1068,
      "step": 340
    },
    {
      "epoch": 2.9912280701754383,
      "grad_norm": 0.42595192790031433,
      "learning_rate": 3.177306659114015e-06,
      "loss": 0.1039,
      "step": 341
    },
    {
      "epoch": 3.0,
      "grad_norm": 1.1214542388916016,
      "learning_rate": 3.164351285381549e-06,
      "loss": 0.1062,
      "step": 342
    },
    {
      "epoch": 3.008771929824561,
      "grad_norm": 0.7622955441474915,
      "learning_rate": 3.1513766864527577e-06,
      "loss": 0.1015,
      "step": 343
    },
    {
      "epoch": 3.017543859649123,
      "grad_norm": 0.2676297724246979,
      "learning_rate": 3.1383832377904676e-06,
      "loss": 0.1037,
      "step": 344
    },
    {
      "epoch": 3.026315789473684,
      "grad_norm": 0.8695605397224426,
      "learning_rate": 3.1253713154029857e-06,
      "loss": 0.1056,
      "step": 345
| }, | |
| { | |
| "epoch": 3.0350877192982457, | |
| "grad_norm": 0.5875906944274902, | |
| "learning_rate": 3.1123412958332155e-06, | |
| "loss": 0.1067, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 3.043859649122807, | |
| "grad_norm": 0.7699372172355652, | |
| "learning_rate": 3.0992935561477632e-06, | |
| "loss": 0.1035, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 3.0526315789473686, | |
| "grad_norm": 0.5919204354286194, | |
| "learning_rate": 3.0862284739260247e-06, | |
| "loss": 0.1023, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 3.06140350877193, | |
| "grad_norm": 1.3211849927902222, | |
| "learning_rate": 3.07314642724926e-06, | |
| "loss": 0.1065, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 3.0701754385964914, | |
| "grad_norm": 0.6359637379646301, | |
| "learning_rate": 3.0600477946896494e-06, | |
| "loss": 0.106, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 3.0789473684210527, | |
| "grad_norm": 0.35776662826538086, | |
| "learning_rate": 3.046932955299344e-06, | |
| "loss": 0.1046, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 3.087719298245614, | |
| "grad_norm": 0.6657406687736511, | |
| "learning_rate": 3.0338022885994904e-06, | |
| "loss": 0.1076, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 3.0964912280701755, | |
| "grad_norm": 0.7587785720825195, | |
| "learning_rate": 3.0206561745692512e-06, | |
| "loss": 0.1043, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 3.1052631578947367, | |
| "grad_norm": 1.1258317232131958, | |
| "learning_rate": 3.0074949936348084e-06, | |
| "loss": 0.1043, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 3.1140350877192984, | |
| "grad_norm": 0.3570568263530731, | |
| "learning_rate": 2.9943191266583564e-06, | |
| "loss": 0.1032, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 3.1228070175438596, | |
| "grad_norm": 0.843485414981842, | |
| "learning_rate": 2.981128954927075e-06, | |
| "loss": 0.1045, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 3.1315789473684212, | |
| "grad_norm": 0.5719651579856873, | |
| "learning_rate": 2.967924860142103e-06, | |
| "loss": 0.1052, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 3.1403508771929824, | |
| "grad_norm": 2.20767879486084, | |
| "learning_rate": 2.9547072244074853e-06, | |
| "loss": 0.1078, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 3.1491228070175437, | |
| "grad_norm": 0.3715457022190094, | |
| "learning_rate": 2.941476430219122e-06, | |
| "loss": 0.1047, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 3.1578947368421053, | |
| "grad_norm": 0.7803200483322144, | |
| "learning_rate": 2.928232860453694e-06, | |
| "loss": 0.1029, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 3.1666666666666665, | |
| "grad_norm": 0.5198164582252502, | |
| "learning_rate": 2.9149768983575884e-06, | |
| "loss": 0.1032, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 3.175438596491228, | |
| "grad_norm": 0.7827185988426208, | |
| "learning_rate": 2.9017089275358017e-06, | |
| "loss": 0.1043, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 3.1842105263157894, | |
| "grad_norm": 0.4000351130962372, | |
| "learning_rate": 2.8884293319408464e-06, | |
| "loss": 0.1071, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 3.192982456140351, | |
| "grad_norm": 0.9913386106491089, | |
| "learning_rate": 2.8751384958616318e-06, | |
| "loss": 0.1022, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 3.2017543859649122, | |
| "grad_norm": 0.6975695490837097, | |
| "learning_rate": 2.861836803912353e-06, | |
| "loss": 0.1029, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 3.2105263157894735, | |
| "grad_norm": 0.2372695654630661, | |
| "learning_rate": 2.8485246410213497e-06, | |
| "loss": 0.1015, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 3.219298245614035, | |
| "grad_norm": 0.447732537984848, | |
| "learning_rate": 2.835202392419977e-06, | |
| "loss": 0.1052, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 3.2280701754385963, | |
| "grad_norm": 0.6617346405982971, | |
| "learning_rate": 2.8218704436314525e-06, | |
| "loss": 0.1055, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 3.236842105263158, | |
| "grad_norm": 0.5550402402877808, | |
| "learning_rate": 2.8085291804596995e-06, | |
| "loss": 0.102, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 3.245614035087719, | |
| "grad_norm": 0.6046020984649658, | |
| "learning_rate": 2.795178988978185e-06, | |
| "loss": 0.1036, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 3.254385964912281, | |
| "grad_norm": 0.41890618205070496, | |
| "learning_rate": 2.781820255518745e-06, | |
| "loss": 0.1036, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 3.263157894736842, | |
| "grad_norm": 0.8387415409088135, | |
| "learning_rate": 2.768453366660408e-06, | |
| "loss": 0.1076, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 3.2719298245614037, | |
| "grad_norm": 0.5318773984909058, | |
| "learning_rate": 2.755078709218203e-06, | |
| "loss": 0.1052, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 3.280701754385965, | |
| "grad_norm": 0.6617523431777954, | |
| "learning_rate": 2.741696670231969e-06, | |
| "loss": 0.1049, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 3.2894736842105265, | |
| "grad_norm": 1.0190025568008423, | |
| "learning_rate": 2.728307636955156e-06, | |
| "loss": 0.1034, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 3.2982456140350878, | |
| "grad_norm": 0.6924716234207153, | |
| "learning_rate": 2.714911996843617e-06, | |
| "loss": 0.1065, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 3.307017543859649, | |
| "grad_norm": 0.42501118779182434, | |
| "learning_rate": 2.701510137544393e-06, | |
| "loss": 0.1019, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 3.3157894736842106, | |
| "grad_norm": 0.844886064529419, | |
| "learning_rate": 2.6881024468845e-06, | |
| "loss": 0.1047, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 3.324561403508772, | |
| "grad_norm": 0.46512728929519653, | |
| "learning_rate": 2.674689312859704e-06, | |
| "loss": 0.1043, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 3.3333333333333335, | |
| "grad_norm": 0.6242017149925232, | |
| "learning_rate": 2.6612711236232915e-06, | |
| "loss": 0.1046, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 3.3421052631578947, | |
| "grad_norm": 0.6578526496887207, | |
| "learning_rate": 2.6478482674748375e-06, | |
| "loss": 0.1031, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 3.3508771929824563, | |
| "grad_norm": 0.4822542667388916, | |
| "learning_rate": 2.63442113284897e-06, | |
| "loss": 0.1053, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 3.3596491228070176, | |
| "grad_norm": 0.48255595564842224, | |
| "learning_rate": 2.6209901083041307e-06, | |
| "loss": 0.1058, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 3.3684210526315788, | |
| "grad_norm": 0.6624025702476501, | |
| "learning_rate": 2.6075555825113265e-06, | |
| "loss": 0.1066, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 3.3771929824561404, | |
| "grad_norm": 0.6962618827819824, | |
| "learning_rate": 2.5941179442428864e-06, | |
| "loss": 0.102, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 3.3859649122807016, | |
| "grad_norm": 0.4976450502872467, | |
| "learning_rate": 2.580677582361208e-06, | |
| "loss": 0.1011, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 3.3947368421052633, | |
| "grad_norm": 0.5283737182617188, | |
| "learning_rate": 2.5672348858075053e-06, | |
| "loss": 0.1057, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 3.4035087719298245, | |
| "grad_norm": 0.32338738441467285, | |
| "learning_rate": 2.553790243590556e-06, | |
| "loss": 0.1015, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 3.412280701754386, | |
| "grad_norm": 0.7909435629844666, | |
| "learning_rate": 2.5403440447754385e-06, | |
| "loss": 0.1036, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 3.4210526315789473, | |
| "grad_norm": 0.6297115087509155, | |
| "learning_rate": 2.5268966784722792e-06, | |
| "loss": 0.1042, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 3.4298245614035086, | |
| "grad_norm": 0.32988762855529785, | |
| "learning_rate": 2.513448533824988e-06, | |
| "loss": 0.1059, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 3.43859649122807, | |
| "grad_norm": 0.9211220145225525, | |
| "learning_rate": 2.5e-06, | |
| "loss": 0.1015, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 3.4473684210526314, | |
| "grad_norm": 1.2157588005065918, | |
| "learning_rate": 2.486551466175013e-06, | |
| "loss": 0.1035, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 3.456140350877193, | |
| "grad_norm": 0.4786648452281952, | |
| "learning_rate": 2.4731033215277216e-06, | |
| "loss": 0.1026, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 3.4649122807017543, | |
| "grad_norm": 0.37398242950439453, | |
| "learning_rate": 2.4596559552245623e-06, | |
| "loss": 0.1044, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 3.473684210526316, | |
| "grad_norm": 0.5536217093467712, | |
| "learning_rate": 2.446209756409445e-06, | |
| "loss": 0.1043, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 3.482456140350877, | |
| "grad_norm": 0.708406925201416, | |
| "learning_rate": 2.432765114192495e-06, | |
| "loss": 0.1046, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 3.4912280701754383, | |
| "grad_norm": 0.7140893340110779, | |
| "learning_rate": 2.4193224176387926e-06, | |
| "loss": 0.1039, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 3.5, | |
| "grad_norm": 0.8078088760375977, | |
| "learning_rate": 2.4058820557571144e-06, | |
| "loss": 0.1013, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 3.5087719298245617, | |
| "grad_norm": 0.7129591107368469, | |
| "learning_rate": 2.3924444174886735e-06, | |
| "loss": 0.1057, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 3.517543859649123, | |
| "grad_norm": 1.293412446975708, | |
| "learning_rate": 2.37900989169587e-06, | |
| "loss": 0.1081, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 3.526315789473684, | |
| "grad_norm": 0.7235314249992371, | |
| "learning_rate": 2.3655788671510314e-06, | |
| "loss": 0.1054, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 3.5350877192982457, | |
| "grad_norm": 0.6008841395378113, | |
| "learning_rate": 2.3521517325251637e-06, | |
| "loss": 0.1033, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 3.543859649122807, | |
| "grad_norm": 0.6819609999656677, | |
| "learning_rate": 2.3387288763767097e-06, | |
| "loss": 0.1019, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 3.5526315789473686, | |
| "grad_norm": 0.5696406960487366, | |
| "learning_rate": 2.325310687140296e-06, | |
| "loss": 0.1043, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 3.56140350877193, | |
| "grad_norm": 0.8597077131271362, | |
| "learning_rate": 2.3118975531155003e-06, | |
| "loss": 0.1037, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 3.5701754385964914, | |
| "grad_norm": 0.43985217809677124, | |
| "learning_rate": 2.2984898624556075e-06, | |
| "loss": 0.105, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 3.5789473684210527, | |
| "grad_norm": 0.5448469519615173, | |
| "learning_rate": 2.2850880031563845e-06, | |
| "loss": 0.1037, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 3.587719298245614, | |
| "grad_norm": 0.8221977949142456, | |
| "learning_rate": 2.271692363044845e-06, | |
| "loss": 0.1015, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 3.5964912280701755, | |
| "grad_norm": 0.9838594198226929, | |
| "learning_rate": 2.2583033297680316e-06, | |
| "loss": 0.1085, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 3.6052631578947367, | |
| "grad_norm": 1.034848928451538, | |
| "learning_rate": 2.2449212907817985e-06, | |
| "loss": 0.104, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 3.6140350877192984, | |
| "grad_norm": 1.0788371562957764, | |
| "learning_rate": 2.2315466333395927e-06, | |
| "loss": 0.1033, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 3.6228070175438596, | |
| "grad_norm": 0.49096915125846863, | |
| "learning_rate": 2.2181797444812557e-06, | |
| "loss": 0.1044, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 3.6315789473684212, | |
| "grad_norm": 1.309685230255127, | |
| "learning_rate": 2.204821011021815e-06, | |
| "loss": 0.1036, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 3.6403508771929824, | |
| "grad_norm": 0.5014146566390991, | |
| "learning_rate": 2.191470819540301e-06, | |
| "loss": 0.104, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 3.6491228070175437, | |
| "grad_norm": 0.770470380783081, | |
| "learning_rate": 2.178129556368548e-06, | |
| "loss": 0.1049, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 3.6578947368421053, | |
| "grad_norm": 0.4639376699924469, | |
| "learning_rate": 2.1647976075800235e-06, | |
| "loss": 0.1047, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 3.6666666666666665, | |
| "grad_norm": 1.101885437965393, | |
| "learning_rate": 2.151475358978652e-06, | |
| "loss": 0.1035, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 3.675438596491228, | |
| "grad_norm": 0.5644329786300659, | |
| "learning_rate": 2.138163196087648e-06, | |
| "loss": 0.103, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 3.6842105263157894, | |
| "grad_norm": 1.1015008687973022, | |
| "learning_rate": 2.1248615041383686e-06, | |
| "loss": 0.1054, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 3.692982456140351, | |
| "grad_norm": 0.7311366200447083, | |
| "learning_rate": 2.111570668059155e-06, | |
| "loss": 0.1043, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 3.7017543859649122, | |
| "grad_norm": 0.38242173194885254, | |
| "learning_rate": 2.098291072464199e-06, | |
| "loss": 0.1041, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 3.7105263157894735, | |
| "grad_norm": 1.231512188911438, | |
| "learning_rate": 2.085023101642412e-06, | |
| "loss": 0.1021, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 3.719298245614035, | |
| "grad_norm": 0.41761213541030884, | |
| "learning_rate": 2.0717671395463063e-06, | |
| "loss": 0.1062, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 3.7280701754385968, | |
| "grad_norm": 0.4593309462070465, | |
| "learning_rate": 2.0585235697808794e-06, | |
| "loss": 0.1012, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 3.736842105263158, | |
| "grad_norm": 0.9147135019302368, | |
| "learning_rate": 2.0452927755925155e-06, | |
| "loss": 0.1046, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 3.745614035087719, | |
| "grad_norm": 0.39639535546302795, | |
| "learning_rate": 2.0320751398578984e-06, | |
| "loss": 0.1018, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 3.754385964912281, | |
| "grad_norm": 0.688010573387146, | |
| "learning_rate": 2.0188710450729255e-06, | |
| "loss": 0.104, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 3.763157894736842, | |
| "grad_norm": 0.5140353441238403, | |
| "learning_rate": 2.005680873341644e-06, | |
| "loss": 0.1033, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 3.7719298245614032, | |
| "grad_norm": 0.5970481634140015, | |
| "learning_rate": 1.992505006365191e-06, | |
| "loss": 0.1044, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 3.780701754385965, | |
| "grad_norm": 0.551162838935852, | |
| "learning_rate": 1.9793438254307496e-06, | |
| "loss": 0.1042, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 3.7894736842105265, | |
| "grad_norm": 0.5344637632369995, | |
| "learning_rate": 1.96619771140051e-06, | |
| "loss": 0.1042, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 3.7982456140350878, | |
| "grad_norm": 0.5357667207717896, | |
| "learning_rate": 1.9530670447006566e-06, | |
| "loss": 0.101, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 3.807017543859649, | |
| "grad_norm": 1.2536660432815552, | |
| "learning_rate": 1.9399522053103514e-06, | |
| "loss": 0.1008, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 3.8157894736842106, | |
| "grad_norm": 0.4888289272785187, | |
| "learning_rate": 1.926853572750741e-06, | |
| "loss": 0.1028, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 3.824561403508772, | |
| "grad_norm": 0.5810404419898987, | |
| "learning_rate": 1.913771526073976e-06, | |
| "loss": 0.1031, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 3.8333333333333335, | |
| "grad_norm": 0.5372979044914246, | |
| "learning_rate": 1.9007064438522374e-06, | |
| "loss": 0.107, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 3.8421052631578947, | |
| "grad_norm": 0.8293616771697998, | |
| "learning_rate": 1.8876587041667855e-06, | |
| "loss": 0.1033, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 3.8508771929824563, | |
| "grad_norm": 2.361504554748535, | |
| "learning_rate": 1.8746286845970145e-06, | |
| "loss": 0.1098, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 3.8596491228070176, | |
| "grad_norm": 0.70230633020401, | |
| "learning_rate": 1.8616167622095328e-06, | |
| "loss": 0.1034, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 3.8684210526315788, | |
| "grad_norm": 0.6323564052581787, | |
| "learning_rate": 1.8486233135472436e-06, | |
| "loss": 0.1058, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 3.8771929824561404, | |
| "grad_norm": 0.48205408453941345, | |
| "learning_rate": 1.8356487146184517e-06, | |
| "loss": 0.105, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 3.8859649122807016, | |
| "grad_norm": 0.6996872425079346, | |
| "learning_rate": 1.8226933408859864e-06, | |
| "loss": 0.1083, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 3.8947368421052633, | |
| "grad_norm": 0.4114651679992676, | |
| "learning_rate": 1.8097575672563278e-06, | |
| "loss": 0.1003, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 3.9035087719298245, | |
| "grad_norm": 0.5234648585319519, | |
| "learning_rate": 1.7968417680687666e-06, | |
| "loss": 0.1019, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 3.912280701754386, | |
| "grad_norm": 1.0571491718292236, | |
| "learning_rate": 1.7839463170845641e-06, | |
| "loss": 0.1003, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 3.9210526315789473, | |
| "grad_norm": 0.7470094561576843, | |
| "learning_rate": 1.7710715874761408e-06, | |
| "loss": 0.1061, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 3.9298245614035086, | |
| "grad_norm": 0.901695191860199, | |
| "learning_rate": 1.7582179518162742e-06, | |
| "loss": 0.1015, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 3.93859649122807, | |
| "grad_norm": 1.0251179933547974, | |
| "learning_rate": 1.7453857820673215e-06, | |
| "loss": 0.1, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 3.9473684210526314, | |
| "grad_norm": 0.5065406560897827, | |
| "learning_rate": 1.7325754495704508e-06, | |
| "loss": 0.1036, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 3.956140350877193, | |
| "grad_norm": 0.9541155099868774, | |
| "learning_rate": 1.7197873250348962e-06, | |
| "loss": 0.1015, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 3.9649122807017543, | |
| "grad_norm": 0.6264199018478394, | |
| "learning_rate": 1.7070217785272354e-06, | |
| "loss": 0.1026, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 3.973684210526316, | |
| "grad_norm": 0.6260526180267334, | |
| "learning_rate": 1.6942791794606716e-06, | |
| "loss": 0.1039, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 3.982456140350877, | |
| "grad_norm": 0.4730931222438812, | |
| "learning_rate": 1.681559896584352e-06, | |
| "loss": 0.1045, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 3.9912280701754383, | |
| "grad_norm": 0.5011451840400696, | |
| "learning_rate": 1.668864297972689e-06, | |
| "loss": 0.1062, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 4.0, | |
| "grad_norm": 1.0113046169281006, | |
| "learning_rate": 1.6561927510147172e-06, | |
| "loss": 0.1005, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 4.008771929824562, | |
| "grad_norm": 0.6017364263534546, | |
| "learning_rate": 1.6435456224034536e-06, | |
| "loss": 0.1042, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 4.017543859649122, | |
| "grad_norm": 0.6874931454658508, | |
| "learning_rate": 1.63092327812529e-06, | |
| "loss": 0.102, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 4.026315789473684, | |
| "grad_norm": 1.311024785041809, | |
| "learning_rate": 1.6183260834494053e-06, | |
| "loss": 0.1063, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 4.035087719298246, | |
| "grad_norm": 0.3640352785587311, | |
| "learning_rate": 1.6057544029171863e-06, | |
| "loss": 0.1039, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 4.043859649122807, | |
| "grad_norm": 0.6056526303291321, | |
| "learning_rate": 1.5932086003316893e-06, | |
| "loss": 0.099, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 4.052631578947368, | |
| "grad_norm": 0.5407683849334717, | |
| "learning_rate": 1.5806890387471025e-06, | |
| "loss": 0.1038, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 4.06140350877193, | |
| "grad_norm": 0.7054030895233154, | |
| "learning_rate": 1.5681960804582474e-06, | |
| "loss": 0.1001, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 4.0701754385964914, | |
| "grad_norm": 0.8736140727996826, | |
| "learning_rate": 1.5557300869900876e-06, | |
| "loss": 0.1035, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 4.078947368421052, | |
| "grad_norm": 0.6689419746398926, | |
| "learning_rate": 1.5432914190872757e-06, | |
| "loss": 0.1052, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 4.087719298245614, | |
| "grad_norm": 0.8937819600105286, | |
| "learning_rate": 1.530880436703705e-06, | |
| "loss": 0.1024, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 4.0964912280701755, | |
| "grad_norm": 0.24332484602928162, | |
| "learning_rate": 1.518497498992097e-06, | |
| "loss": 0.0984, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 4.105263157894737, | |
| "grad_norm": 0.9716914296150208, | |
| "learning_rate": 1.5061429642936107e-06, | |
| "loss": 0.1012, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 4.114035087719298, | |
| "grad_norm": 0.5864392518997192, | |
| "learning_rate": 1.4938171901274678e-06, | |
| "loss": 0.1029, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 4.12280701754386, | |
| "grad_norm": 0.4616212546825409, | |
| "learning_rate": 1.4815205331806113e-06, | |
| "loss": 0.1035, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 4.131578947368421, | |
| "grad_norm": 0.5989730954170227, | |
| "learning_rate": 1.4692533492973775e-06, | |
| "loss": 0.1036, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 4.140350877192983, | |
| "grad_norm": 0.7900629639625549, | |
| "learning_rate": 1.4570159934692085e-06, | |
| "loss": 0.1044, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 4.149122807017544, | |
| "grad_norm": 0.5659995675086975, | |
| "learning_rate": 1.4448088198243668e-06, | |
| "loss": 0.1024, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 4.157894736842105, | |
| "grad_norm": 0.7867873311042786, | |
| "learning_rate": 1.432632181617698e-06, | |
| "loss": 0.1038, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 4.166666666666667, | |
| "grad_norm": 0.44385358691215515, | |
| "learning_rate": 1.4204864312204033e-06, | |
| "loss": 0.1006, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 4.175438596491228, | |
| "grad_norm": 0.3909265697002411, | |
| "learning_rate": 1.4083719201098404e-06, | |
| "loss": 0.1019, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 4.184210526315789, | |
| "grad_norm": 0.7079223990440369, | |
| "learning_rate": 1.3962889988593609e-06, | |
| "loss": 0.1019, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 4.192982456140351, | |
| "grad_norm": 0.6703695058822632, | |
| "learning_rate": 1.3842380171281522e-06, | |
| "loss": 0.1063, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 4.201754385964913, | |
| "grad_norm": 0.3477051556110382, | |
| "learning_rate": 1.3722193236511344e-06, | |
| "loss": 0.1004, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 4.2105263157894735, | |
| "grad_norm": 0.7296048402786255, | |
| "learning_rate": 1.3602332662288536e-06, | |
| "loss": 0.1057, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 4.219298245614035, | |
| "grad_norm": 0.7007803916931152, | |
| "learning_rate": 1.348280191717427e-06, | |
| "loss": 0.1007, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 4.228070175438597, | |
| "grad_norm": 0.948968231678009, | |
| "learning_rate": 1.3363604460185031e-06, | |
| "loss": 0.1005, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 4.2368421052631575, | |
| "grad_norm": 0.6567812561988831, | |
| "learning_rate": 1.3244743740692496e-06, | |
| "loss": 0.1016, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 4.245614035087719, | |
| "grad_norm": 0.5390146374702454, | |
| "learning_rate": 1.3126223198323752e-06, | |
| "loss": 0.1025, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 4.254385964912281, | |
| "grad_norm": 0.43638724088668823, | |
| "learning_rate": 1.3008046262861696e-06, | |
| "loss": 0.1053, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 4.2631578947368425, | |
| "grad_norm": 0.43589839339256287, | |
| "learning_rate": 1.289021635414589e-06, | |
| "loss": 0.1036, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 4.271929824561403, | |
| "grad_norm": 0.3999694585800171, | |
| "learning_rate": 1.277273688197346e-06, | |
| "loss": 0.1023, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 4.280701754385965, | |
| "grad_norm": 0.6314297914505005, | |
| "learning_rate": 1.265561124600057e-06, | |
| "loss": 0.0993, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 4.2894736842105265, | |
| "grad_norm": 0.566033124923706, | |
| "learning_rate": 1.2538842835643906e-06, | |
| "loss": 0.1029, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 4.298245614035087, | |
| "grad_norm": 0.6713336110115051, | |
| "learning_rate": 1.2422435029982669e-06, | |
| "loss": 0.1002, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 4.307017543859649, | |
| "grad_norm": 0.428574800491333, | |
| "learning_rate": 1.2306391197660797e-06, | |
| "loss": 0.1028, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 4.315789473684211, | |
| "grad_norm": 0.637745201587677, | |
| "learning_rate": 1.219071469678941e-06, | |
| "loss": 0.1009, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 4.324561403508772, | |
| "grad_norm": 0.8204445242881775, | |
| "learning_rate": 1.2075408874849747e-06, | |
| "loss": 0.099, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 4.333333333333333, | |
| "grad_norm": 1.010758876800537, | |
| "learning_rate": 1.1960477068596155e-06, | |
| "loss": 0.1006, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 4.342105263157895, | |
| "grad_norm": 0.908112108707428, | |
| "learning_rate": 1.1845922603959677e-06, | |
| "loss": 0.1047, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 4.350877192982456, | |
| "grad_norm": 1.0254642963409424, | |
| "learning_rate": 1.173174879595166e-06, | |
| "loss": 0.0991, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 4.359649122807017, | |
| "grad_norm": 0.5159414410591125, | |
| "learning_rate": 1.1617958948567967e-06, | |
| "loss": 0.0978, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 4.368421052631579, | |
| "grad_norm": 0.9525816440582275, | |
| "learning_rate": 1.1504556354693227e-06, | |
| "loss": 0.1051, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 4.37719298245614, | |
| "grad_norm": 0.9321548938751221, | |
| "learning_rate": 1.1391544296005652e-06, | |
| "loss": 0.1011, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 4.385964912280702, | |
| "grad_norm": 0.7308889627456665, | |
| "learning_rate": 1.1278926042882026e-06, | |
| "loss": 0.1002, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 4.394736842105263, | |
| "grad_norm": 0.9508903622627258, | |
| "learning_rate": 1.116670485430304e-06, | |
| "loss": 0.1013, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 4.4035087719298245, | |
| "grad_norm": 0.5174031853675842, | |
| "learning_rate": 1.1054883977759067e-06, | |
| "loss": 0.104, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 4.412280701754386, | |
| "grad_norm": 0.4504610598087311, | |
| "learning_rate": 1.0943466649156061e-06, | |
| "loss": 0.1013, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 4.421052631578947, | |
| "grad_norm": 0.5650261044502258, | |
| "learning_rate": 1.0832456092722063e-06, | |
| "loss": 0.0995, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 4.4298245614035086, | |
| "grad_norm": 0.37759432196617126, | |
| "learning_rate": 1.0721855520913751e-06, | |
| "loss": 0.1058, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 4.43859649122807, | |
| "grad_norm": 0.7238495349884033, | |
| "learning_rate": 1.0611668134323577e-06, | |
| "loss": 0.1012, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 4.447368421052632, | |
| "grad_norm": 0.6301494240760803, | |
| "learning_rate": 1.0501897121587127e-06, | |
| "loss": 0.1009, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 4.456140350877193, | |
| "grad_norm": 0.9531002044677734, | |
| "learning_rate": 1.0392545659290789e-06, | |
| "loss": 0.1021, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 4.464912280701754, | |
| "grad_norm": 0.4423767924308777, | |
| "learning_rate": 1.0283616911879943e-06, | |
| "loss": 0.1024, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 4.473684210526316, | |
| "grad_norm": 0.5573019981384277, | |
| "learning_rate": 1.0175114031567246e-06, | |
| "loss": 0.1011, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 4.482456140350878, | |
| "grad_norm": 0.9792631268501282, | |
| "learning_rate": 1.0067040158241555e-06, | |
| "loss": 0.1039, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 4.491228070175438, | |
| "grad_norm": 1.7911303043365479, | |
| "learning_rate": 9.95939841937693e-07, | |
| "loss": 0.104, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 4.5, | |
| "grad_norm": 0.5825617909431458, | |
| "learning_rate": 9.852191929942262e-07, | |
| "loss": 0.0987, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 4.508771929824562, | |
| "grad_norm": 0.3129921555519104, | |
| "learning_rate": 9.745423792310996e-07, | |
| "loss": 0.0979, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 4.517543859649123, | |
| "grad_norm": 0.5376678705215454, | |
| "learning_rate": 9.63909709617146e-07, | |
| "loss": 0.0998, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 4.526315789473684, | |
| "grad_norm": 0.48920008540153503, | |
| "learning_rate": 9.533214918437422e-07, | |
| "loss": 0.1017, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 4.535087719298246, | |
| "grad_norm": 0.36829131841659546, | |
| "learning_rate": 9.427780323159006e-07, | |
| "loss": 0.1004, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 4.543859649122807, | |
| "grad_norm": 0.5459544658660889, | |
| "learning_rate": 9.322796361434111e-07, | |
| "loss": 0.1041, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 4.552631578947368, | |
| "grad_norm": 0.8460657000541687, | |
| "learning_rate": 9.218266071320015e-07, | |
| "loss": 0.1012, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 4.56140350877193, | |
| "grad_norm": 0.7692683339118958, | |
| "learning_rate": 9.114192477745568e-07, | |
| "loss": 0.1013, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 4.5701754385964914, | |
| "grad_norm": 0.4503592550754547, | |
| "learning_rate": 9.010578592423544e-07, | |
| "loss": 0.107, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 4.578947368421053, | |
| "grad_norm": 0.9348855018615723, | |
| "learning_rate": 8.907427413763572e-07, | |
| "loss": 0.102, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 4.587719298245614, | |
| "grad_norm": 0.7902988791465759, | |
| "learning_rate": 8.804741926785335e-07, | |
| "loss": 0.1032, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 4.5964912280701755, | |
| "grad_norm": 0.5444673299789429, | |
| "learning_rate": 8.702525103032186e-07, | |
| "loss": 0.0993, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 4.605263157894737, | |
| "grad_norm": 0.728112518787384, | |
| "learning_rate": 8.60077990048517e-07, | |
| "loss": 0.1021, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 4.614035087719298, | |
| "grad_norm": 0.5250695943832397, | |
| "learning_rate": 8.499509263477388e-07, | |
| "loss": 0.1018, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 4.62280701754386, | |
| "grad_norm": 0.3112829625606537, | |
| "learning_rate": 8.398716122608868e-07, | |
| "loss": 0.1037, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 4.631578947368421, | |
| "grad_norm": 0.9097342491149902, | |
| "learning_rate": 8.298403394661658e-07, | |
| "loss": 0.1015, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 4.640350877192983, | |
| "grad_norm": 0.6663810014724731, | |
| "learning_rate": 8.198573982515537e-07, | |
| "loss": 0.1038, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 4.649122807017544, | |
| "grad_norm": 1.1880309581756592, | |
| "learning_rate": 8.099230775063879e-07, | |
| "loss": 0.1044, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 4.657894736842105, | |
| "grad_norm": 0.6492993831634521, | |
| "learning_rate": 8.000376647130165e-07, | |
| "loss": 0.103, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 4.666666666666667, | |
| "grad_norm": 0.43723204731941223, | |
| "learning_rate": 7.902014459384744e-07, | |
| "loss": 0.1025, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 4.675438596491228, | |
| "grad_norm": 0.8422684669494629, | |
| "learning_rate": 7.804147058262015e-07, | |
| "loss": 0.1035, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 4.684210526315789, | |
| "grad_norm": 0.6502094268798828, | |
| "learning_rate": 7.706777275878161e-07, | |
| "loss": 0.0994, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 4.692982456140351, | |
| "grad_norm": 0.5709391236305237, | |
| "learning_rate": 7.609907929949045e-07, | |
| "loss": 0.1056, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 4.701754385964913, | |
| "grad_norm": 0.4126770496368408, | |
| "learning_rate": 7.513541823708828e-07, | |
| "loss": 0.101, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 4.7105263157894735, | |
| "grad_norm": 0.5016621947288513, | |
| "learning_rate": 7.417681745828706e-07, | |
| "loss": 0.0999, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 4.719298245614035, | |
| "grad_norm": 0.8139487504959106, | |
| "learning_rate": 7.322330470336314e-07, | |
| "loss": 0.0984, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 4.728070175438597, | |
| "grad_norm": 0.5805723667144775, | |
| "learning_rate": 7.227490756535396e-07, | |
| "loss": 0.1011, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 4.7368421052631575, | |
| "grad_norm": 0.7970795631408691, | |
| "learning_rate": 7.133165348925978e-07, | |
| "loss": 0.1016, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 4.745614035087719, | |
| "grad_norm": 0.6336880326271057, | |
| "learning_rate": 7.039356977124937e-07, | |
| "loss": 0.1027, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 4.754385964912281, | |
| "grad_norm": 0.2953254282474518, | |
| "learning_rate": 6.946068355786992e-07, | |
| "loss": 0.1022, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 4.7631578947368425, | |
| "grad_norm": 0.5646472573280334, | |
| "learning_rate": 6.853302184526217e-07, | |
| "loss": 0.0998, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 4.771929824561403, | |
| "grad_norm": 0.6545483469963074, | |
| "learning_rate": 6.761061147837808e-07, | |
| "loss": 0.0985, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 4.780701754385965, | |
| "grad_norm": 0.8741705417633057, | |
| "learning_rate": 6.669347915020524e-07, | |
| "loss": 0.1006, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 4.7894736842105265, | |
| "grad_norm": 0.8579487204551697, | |
| "learning_rate": 6.578165140099318e-07, | |
| "loss": 0.1037, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 4.798245614035087, | |
| "grad_norm": 1.0744833946228027, | |
| "learning_rate": 6.487515461748631e-07, | |
| "loss": 0.1017, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 4.807017543859649, | |
| "grad_norm": 0.4954414367675781, | |
| "learning_rate": 6.397401503215992e-07, | |
| "loss": 0.1006, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 4.815789473684211, | |
| "grad_norm": 0.525191068649292, | |
| "learning_rate": 6.307825872246076e-07, | |
| "loss": 0.1024, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 4.824561403508772, | |
| "grad_norm": 0.8922368288040161, | |
| "learning_rate": 6.218791161005336e-07, | |
| "loss": 0.0999, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 4.833333333333333, | |
| "grad_norm": 0.6471604704856873, | |
| "learning_rate": 6.13029994600686e-07, | |
| "loss": 0.0994, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 4.842105263157895, | |
| "grad_norm": 0.49826696515083313, | |
| "learning_rate": 6.042354788035943e-07, | |
| "loss": 0.1003, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 4.850877192982456, | |
| "grad_norm": 0.7908043265342712, | |
| "learning_rate": 5.954958232075858e-07, | |
| "loss": 0.1003, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 4.859649122807017, | |
| "grad_norm": 0.40011560916900635, | |
| "learning_rate": 5.868112807234313e-07, | |
| "loss": 0.0991, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 4.868421052631579, | |
| "grad_norm": 0.9797350764274597, | |
| "learning_rate": 5.781821026670203e-07, | |
| "loss": 0.1005, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 4.87719298245614, | |
| "grad_norm": 0.4581677317619324, | |
| "learning_rate": 5.696085387520894e-07, | |
| "loss": 0.1013, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 4.885964912280702, | |
| "grad_norm": 0.6596454381942749, | |
| "learning_rate": 5.610908370829981e-07, | |
| "loss": 0.1028, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 4.894736842105263, | |
| "grad_norm": 0.5106292963027954, | |
| "learning_rate": 5.526292441475448e-07, | |
| "loss": 0.1023, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 4.9035087719298245, | |
| "grad_norm": 0.5137461423873901, | |
| "learning_rate": 5.442240048098402e-07, | |
| "loss": 0.1036, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 4.912280701754386, | |
| "grad_norm": 0.4619182348251343, | |
| "learning_rate": 5.358753623032137e-07, | |
| "loss": 0.0979, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 4.921052631578947, | |
| "grad_norm": 0.5350770354270935, | |
| "learning_rate": 5.275835582231833e-07, | |
| "loss": 0.0992, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 4.9298245614035086, | |
| "grad_norm": 0.7599822878837585, | |
| "learning_rate": 5.193488325204551e-07, | |
| "loss": 0.0983, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 4.93859649122807, | |
| "grad_norm": 0.47537004947662354, | |
| "learning_rate": 5.111714234939868e-07, | |
| "loss": 0.1004, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 4.947368421052632, | |
| "grad_norm": 0.597273588180542, | |
| "learning_rate": 5.030515677840883e-07, | |
| "loss": 0.1015, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 4.956140350877193, | |
| "grad_norm": 0.7155528664588928, | |
| "learning_rate": 4.949895003655728e-07, | |
| "loss": 0.1017, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 4.964912280701754, | |
| "grad_norm": 0.530358612537384, | |
| "learning_rate": 4.869854545409627e-07, | |
| "loss": 0.0998, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 4.973684210526316, | |
| "grad_norm": 0.6721721291542053, | |
| "learning_rate": 4.790396619337286e-07, | |
| "loss": 0.1003, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 4.982456140350877, | |
| "grad_norm": 0.8486731648445129, | |
| "learning_rate": 4.711523524815978e-07, | |
| "loss": 0.0996, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 4.991228070175438, | |
| "grad_norm": 0.7072808742523193, | |
| "learning_rate": 4.633237544298891e-07, | |
| "loss": 0.1004, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 5.0, | |
| "grad_norm": 0.41283953189849854, | |
| "learning_rate": 4.555540943249187e-07, | |
| "loss": 0.1026, | |
| "step": 570 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 684, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 6, | |
| "save_steps": 114, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 1.72999503707426e+19, | |
| "train_batch_size": 4, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
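
The block above closes the recorded training state (log history, scheduler/checkpoint settings, and trainer callbacks). For quick inspection, a minimal Python sketch like the one below can load the state and summarize the loss curve. It assumes the state is saved under the usual Hugging Face Trainer filename `trainer_state.json`; the path, the printed summary, and the `loss_curve.png` output are illustrative choices, not part of the recorded state.

```python
# Minimal sketch: load a Trainer state file and summarize the logged loss curve.
# Assumes the JSON above is saved as "trainer_state.json" (hypothetical path).
import json

with open("trainer_state.json") as f:
    state = json.load(f)

history = state["log_history"]
steps = [e["step"] for e in history if "loss" in e]
losses = [e["loss"] for e in history if "loss" in e]
lrs = [e["learning_rate"] for e in history if "learning_rate" in e]

print(f"logged steps: {len(steps)} of max_steps={state['max_steps']}")
print(f"first loss: {losses[0]:.4f}  final loss: {losses[-1]:.4f}")
print(f"final learning rate: {lrs[-1]:.3e}")

# Optional: plot the loss curve if matplotlib is installed.
try:
    import matplotlib.pyplot as plt
    plt.plot(steps, losses)
    plt.xlabel("step")
    plt.ylabel("training loss")
    plt.title(f"epoch {state['epoch']:.1f} / {state['num_train_epochs']}")
    plt.savefig("loss_curve.png")  # illustrative output filename
except ImportError:
    pass
```

With the values logged here, the sketch would report the loss falling from roughly 5.1 at step 1 to about 0.10 by step 570 (epoch 5 of 6), with the learning rate decayed to ~4.6e-07 at the last logged step.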