{
  "best_global_step": 684,
  "best_metric": 0.4992051124572754,
  "best_model_checkpoint": "outputs/checkpoint-684",
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 684,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0014619883040935672,
      "grad_norm": 1.7495784759521484,
      "learning_rate": 0.0,
      "loss": 0.425,
      "step": 1
    },
    {
      "epoch": 0.0029239766081871343,
      "grad_norm": 1.9972009658813477,
      "learning_rate": 1.4619883040935672e-07,
      "loss": 0.5992,
      "step": 2
    },
    {
      "epoch": 0.0043859649122807015,
      "grad_norm": 1.8668241500854492,
      "learning_rate": 2.9239766081871344e-07,
      "loss": 0.4872,
      "step": 3
    },
    {
      "epoch": 0.005847953216374269,
      "grad_norm": 2.140504837036133,
      "learning_rate": 4.385964912280702e-07,
      "loss": 0.5045,
      "step": 4
    },
    {
      "epoch": 0.007309941520467836,
      "grad_norm": 1.6007329225540161,
      "learning_rate": 5.847953216374269e-07,
      "loss": 0.4394,
      "step": 5
    },
    {
      "epoch": 0.008771929824561403,
      "grad_norm": 1.6368324756622314,
      "learning_rate": 7.309941520467836e-07,
      "loss": 0.3499,
      "step": 6
    },
    {
      "epoch": 0.01023391812865497,
      "grad_norm": 2.5845789909362793,
      "learning_rate": 8.771929824561404e-07,
      "loss": 0.4349,
      "step": 7
    },
    {
      "epoch": 0.011695906432748537,
      "grad_norm": 2.194718360900879,
      "learning_rate": 1.0233918128654972e-06,
      "loss": 0.5503,
      "step": 8
    },
    {
      "epoch": 0.013157894736842105,
      "grad_norm": 1.9716426134109497,
      "learning_rate": 1.1695906432748538e-06,
      "loss": 0.4435,
      "step": 9
    },
    {
      "epoch": 0.014619883040935672,
      "grad_norm": 2.026991605758667,
      "learning_rate": 1.3157894736842106e-06,
      "loss": 0.5627,
      "step": 10
    },
    {
      "epoch": 0.01608187134502924,
      "grad_norm": 1.5689380168914795,
      "learning_rate": 1.4619883040935671e-06,
      "loss": 0.3364,
      "step": 11
    },
    {
      "epoch": 0.017543859649122806,
      "grad_norm": 1.644498348236084,
      "learning_rate": 1.608187134502924e-06,
      "loss": 0.3911,
      "step": 12
    },
    {
      "epoch": 0.019005847953216373,
      "grad_norm": 1.3109540939331055,
      "learning_rate": 1.7543859649122807e-06,
      "loss": 0.3462,
      "step": 13
    },
    {
      "epoch": 0.02046783625730994,
      "grad_norm": 3.642599105834961,
      "learning_rate": 1.9005847953216373e-06,
      "loss": 0.4644,
      "step": 14
    },
    {
      "epoch": 0.021929824561403508,
      "grad_norm": 2.097911834716797,
      "learning_rate": 2.0467836257309943e-06,
      "loss": 0.4397,
      "step": 15
    },
    {
      "epoch": 0.023391812865497075,
      "grad_norm": 2.2044146060943604,
      "learning_rate": 2.1929824561403507e-06,
      "loss": 0.4713,
      "step": 16
    },
    {
      "epoch": 0.024853801169590642,
      "grad_norm": 2.130998134613037,
      "learning_rate": 2.3391812865497075e-06,
      "loss": 0.7038,
      "step": 17
    },
    {
      "epoch": 0.02631578947368421,
      "grad_norm": 1.7702172994613647,
      "learning_rate": 2.4853801169590643e-06,
      "loss": 0.4186,
      "step": 18
    },
    {
      "epoch": 0.027777777777777776,
      "grad_norm": 2.0703587532043457,
      "learning_rate": 2.631578947368421e-06,
      "loss": 0.4122,
      "step": 19
    },
    {
      "epoch": 0.029239766081871343,
      "grad_norm": 1.802437424659729,
      "learning_rate": 2.777777777777778e-06,
      "loss": 0.4731,
      "step": 20
    },
    {
      "epoch": 0.03070175438596491,
      "grad_norm": 1.637001633644104,
      "learning_rate": 2.9239766081871343e-06,
      "loss": 0.4402,
      "step": 21
    },
    {
      "epoch": 0.03216374269005848,
      "grad_norm": 1.8240504264831543,
      "learning_rate": 3.070175438596491e-06,
      "loss": 0.4405,
      "step": 22
    },
    {
      "epoch": 0.033625730994152045,
      "grad_norm": 1.52961003780365,
      "learning_rate": 3.216374269005848e-06,
      "loss": 0.4439,
      "step": 23
    },
    {
      "epoch": 0.03508771929824561,
      "grad_norm": 1.3200383186340332,
      "learning_rate": 3.3625730994152047e-06,
      "loss": 0.3027,
      "step": 24
    },
    {
      "epoch": 0.03654970760233918,
      "grad_norm": 1.5211174488067627,
      "learning_rate": 3.5087719298245615e-06,
      "loss": 0.3538,
      "step": 25
    },
    {
      "epoch": 0.038011695906432746,
      "grad_norm": 1.7521167993545532,
      "learning_rate": 3.6549707602339183e-06,
      "loss": 0.3215,
      "step": 26
    },
    {
      "epoch": 0.039473684210526314,
      "grad_norm": 2.1831953525543213,
      "learning_rate": 3.8011695906432747e-06,
      "loss": 0.5326,
      "step": 27
    },
    {
      "epoch": 0.04093567251461988,
      "grad_norm": 1.9375377893447876,
      "learning_rate": 3.9473684210526315e-06,
      "loss": 0.3058,
      "step": 28
    },
    {
      "epoch": 0.04239766081871345,
      "grad_norm": 1.3378911018371582,
      "learning_rate": 4.093567251461989e-06,
      "loss": 0.3124,
      "step": 29
    },
    {
      "epoch": 0.043859649122807015,
      "grad_norm": 2.0896005630493164,
      "learning_rate": 4.239766081871345e-06,
      "loss": 0.4827,
      "step": 30
    },
    {
      "epoch": 0.04532163742690058,
      "grad_norm": 1.5062603950500488,
      "learning_rate": 4.3859649122807014e-06,
      "loss": 0.3157,
      "step": 31
    },
    {
      "epoch": 0.04678362573099415,
      "grad_norm": 1.947313904762268,
      "learning_rate": 4.532163742690059e-06,
      "loss": 0.4191,
      "step": 32
    },
    {
      "epoch": 0.04824561403508772,
      "grad_norm": 1.9071850776672363,
      "learning_rate": 4.678362573099415e-06,
      "loss": 0.4158,
      "step": 33
    },
    {
      "epoch": 0.049707602339181284,
      "grad_norm": 1.2716987133026123,
      "learning_rate": 4.824561403508772e-06,
      "loss": 0.2569,
      "step": 34
    },
    {
      "epoch": 0.05116959064327485,
      "grad_norm": 1.7718721628189087,
      "learning_rate": 4.970760233918129e-06,
      "loss": 0.3228,
      "step": 35
    },
    {
      "epoch": 0.05263157894736842,
      "grad_norm": 1.8403756618499756,
      "learning_rate": 5.116959064327485e-06,
      "loss": 0.3864,
      "step": 36
    },
    {
      "epoch": 0.054093567251461985,
      "grad_norm": 1.7765403985977173,
      "learning_rate": 5.263157894736842e-06,
      "loss": 0.3609,
      "step": 37
    },
    {
      "epoch": 0.05555555555555555,
      "grad_norm": 1.4612114429473877,
      "learning_rate": 5.409356725146199e-06,
      "loss": 0.3059,
      "step": 38
    },
    {
      "epoch": 0.05701754385964912,
      "grad_norm": 1.8746145963668823,
      "learning_rate": 5.555555555555556e-06,
      "loss": 0.4677,
      "step": 39
    },
    {
      "epoch": 0.05847953216374269,
      "grad_norm": 2.311183452606201,
      "learning_rate": 5.701754385964912e-06,
      "loss": 0.5493,
      "step": 40
    },
    {
      "epoch": 0.059941520467836254,
      "grad_norm": 1.568487286567688,
      "learning_rate": 5.8479532163742686e-06,
      "loss": 0.3327,
      "step": 41
    },
    {
      "epoch": 0.06140350877192982,
      "grad_norm": 2.155261516571045,
      "learning_rate": 5.994152046783626e-06,
      "loss": 0.3829,
      "step": 42
    },
    {
      "epoch": 0.06286549707602339,
      "grad_norm": 2.0299155712127686,
      "learning_rate": 6.140350877192982e-06,
      "loss": 0.3032,
      "step": 43
    },
    {
      "epoch": 0.06432748538011696,
      "grad_norm": 2.4522647857666016,
      "learning_rate": 6.286549707602339e-06,
      "loss": 0.5365,
      "step": 44
    },
    {
      "epoch": 0.06578947368421052,
      "grad_norm": 1.9584052562713623,
      "learning_rate": 6.432748538011696e-06,
      "loss": 0.3653,
      "step": 45
    },
    {
      "epoch": 0.06725146198830409,
      "grad_norm": 1.3375778198242188,
      "learning_rate": 6.578947368421053e-06,
      "loss": 0.2396,
      "step": 46
    },
    {
      "epoch": 0.06871345029239766,
      "grad_norm": 2.1510448455810547,
      "learning_rate": 6.725146198830409e-06,
      "loss": 0.4654,
      "step": 47
    },
    {
      "epoch": 0.07017543859649122,
      "grad_norm": 1.4656825065612793,
      "learning_rate": 6.871345029239766e-06,
      "loss": 0.2605,
      "step": 48
    },
    {
      "epoch": 0.07163742690058479,
      "grad_norm": 2.095970869064331,
      "learning_rate": 7.017543859649123e-06,
      "loss": 0.4557,
      "step": 49
    },
    {
      "epoch": 0.07309941520467836,
      "grad_norm": 1.7976460456848145,
      "learning_rate": 7.163742690058479e-06,
      "loss": 0.4151,
      "step": 50
    },
    {
      "epoch": 0.07456140350877193,
      "grad_norm": 1.99152672290802,
      "learning_rate": 7.3099415204678366e-06,
      "loss": 0.3263,
      "step": 51
    },
    {
      "epoch": 0.07602339181286549,
      "grad_norm": 1.84891676902771,
      "learning_rate": 7.456140350877193e-06,
      "loss": 0.4964,
      "step": 52
    },
    {
      "epoch": 0.07748538011695906,
      "grad_norm": 2.088576078414917,
      "learning_rate": 7.602339181286549e-06,
      "loss": 0.3814,
      "step": 53
    },
    {
      "epoch": 0.07894736842105263,
      "grad_norm": 1.83640456199646,
      "learning_rate": 7.748538011695907e-06,
      "loss": 0.2547,
      "step": 54
    },
    {
      "epoch": 0.0804093567251462,
      "grad_norm": 2.1047003269195557,
      "learning_rate": 7.894736842105263e-06,
      "loss": 0.3693,
      "step": 55
    },
    {
      "epoch": 0.08187134502923976,
      "grad_norm": 1.741404414176941,
      "learning_rate": 8.04093567251462e-06,
      "loss": 0.3956,
      "step": 56
    },
    {
      "epoch": 0.08333333333333333,
      "grad_norm": 5.588784217834473,
      "learning_rate": 8.187134502923977e-06,
      "loss": 0.5578,
      "step": 57
    },
    {
      "epoch": 0.0847953216374269,
      "grad_norm": 1.9800654649734497,
      "learning_rate": 8.333333333333334e-06,
      "loss": 0.4344,
      "step": 58
    },
    {
      "epoch": 0.08625730994152046,
      "grad_norm": 1.7948042154312134,
      "learning_rate": 8.47953216374269e-06,
      "loss": 0.29,
      "step": 59
    },
    {
      "epoch": 0.08771929824561403,
      "grad_norm": 1.9095481634140015,
      "learning_rate": 8.625730994152046e-06,
      "loss": 0.4691,
      "step": 60
    },
    {
      "epoch": 0.0891812865497076,
      "grad_norm": 1.6817309856414795,
      "learning_rate": 8.771929824561403e-06,
      "loss": 0.3032,
      "step": 61
    },
    {
      "epoch": 0.09064327485380116,
      "grad_norm": 1.9546353816986084,
      "learning_rate": 8.918128654970761e-06,
      "loss": 0.2903,
      "step": 62
    },
    {
      "epoch": 0.09210526315789473,
      "grad_norm": 2.055668592453003,
      "learning_rate": 9.064327485380117e-06,
      "loss": 0.4438,
      "step": 63
    },
    {
      "epoch": 0.0935672514619883,
      "grad_norm": 1.5767889022827148,
      "learning_rate": 9.210526315789474e-06,
      "loss": 0.2873,
      "step": 64
    },
    {
      "epoch": 0.09502923976608187,
      "grad_norm": 1.682050347328186,
      "learning_rate": 9.35672514619883e-06,
      "loss": 0.2899,
      "step": 65
    },
    {
      "epoch": 0.09649122807017543,
      "grad_norm": 1.6590290069580078,
      "learning_rate": 9.502923976608186e-06,
      "loss": 0.2476,
      "step": 66
    },
    {
      "epoch": 0.097953216374269,
      "grad_norm": 1.7078795433044434,
      "learning_rate": 9.649122807017545e-06,
      "loss": 0.3166,
      "step": 67
    },
    {
      "epoch": 0.09941520467836257,
      "grad_norm": 1.7867945432662964,
      "learning_rate": 9.795321637426901e-06,
      "loss": 0.3134,
      "step": 68
    },
    {
      "epoch": 0.10087719298245613,
      "grad_norm": 1.6910769939422607,
      "learning_rate": 9.941520467836257e-06,
      "loss": 0.2943,
      "step": 69
    },
    {
      "epoch": 0.1023391812865497,
      "grad_norm": 1.9768290519714355,
      "learning_rate": 1.0087719298245614e-05,
      "loss": 0.4493,
      "step": 70
    },
    {
      "epoch": 0.10380116959064327,
      "grad_norm": 1.9877041578292847,
      "learning_rate": 1.023391812865497e-05,
      "loss": 0.4361,
      "step": 71
    },
    {
      "epoch": 0.10526315789473684,
      "grad_norm": 2.2635536193847656,
      "learning_rate": 1.0380116959064328e-05,
      "loss": 0.3523,
      "step": 72
    },
    {
      "epoch": 0.1067251461988304,
      "grad_norm": 2.345411777496338,
      "learning_rate": 1.0526315789473684e-05,
      "loss": 0.5638,
      "step": 73
    },
    {
      "epoch": 0.10818713450292397,
      "grad_norm": 2.087989091873169,
      "learning_rate": 1.067251461988304e-05,
      "loss": 0.4163,
      "step": 74
    },
    {
      "epoch": 0.10964912280701754,
      "grad_norm": 2.2147741317749023,
      "learning_rate": 1.0818713450292397e-05,
      "loss": 0.4489,
      "step": 75
    },
    {
      "epoch": 0.1111111111111111,
      "grad_norm": 1.6369982957839966,
      "learning_rate": 1.0964912280701754e-05,
      "loss": 0.4014,
      "step": 76
    },
    {
      "epoch": 0.11257309941520467,
      "grad_norm": 1.7678766250610352,
      "learning_rate": 1.1111111111111112e-05,
      "loss": 0.3449,
      "step": 77
    },
    {
      "epoch": 0.11403508771929824,
      "grad_norm": 1.8219599723815918,
      "learning_rate": 1.1257309941520468e-05,
      "loss": 0.3628,
      "step": 78
    },
    {
      "epoch": 0.1154970760233918,
      "grad_norm": 2.1565258502960205,
      "learning_rate": 1.1403508771929824e-05,
      "loss": 0.4629,
      "step": 79
    },
    {
      "epoch": 0.11695906432748537,
      "grad_norm": 1.5876237154006958,
      "learning_rate": 1.154970760233918e-05,
      "loss": 0.3217,
      "step": 80
    },
    {
      "epoch": 0.11842105263157894,
      "grad_norm": 2.255028486251831,
      "learning_rate": 1.1695906432748537e-05,
      "loss": 0.4575,
      "step": 81
    },
    {
      "epoch": 0.11988304093567251,
      "grad_norm": 1.773084044456482,
      "learning_rate": 1.1842105263157895e-05,
      "loss": 0.3205,
      "step": 82
    },
    {
      "epoch": 0.12134502923976608,
      "grad_norm": 2.2178328037261963,
      "learning_rate": 1.1988304093567252e-05,
      "loss": 0.3171,
      "step": 83
    },
    {
      "epoch": 0.12280701754385964,
      "grad_norm": 2.338407278060913,
      "learning_rate": 1.2134502923976608e-05,
      "loss": 0.479,
      "step": 84
    },
    {
      "epoch": 0.12426900584795321,
      "grad_norm": 2.733433961868286,
      "learning_rate": 1.2280701754385964e-05,
      "loss": 0.4127,
      "step": 85
    },
    {
      "epoch": 0.12573099415204678,
      "grad_norm": 2.2404630184173584,
      "learning_rate": 1.242690058479532e-05,
      "loss": 0.4027,
      "step": 86
    },
    {
      "epoch": 0.12719298245614036,
      "grad_norm": 2.113896608352661,
      "learning_rate": 1.2573099415204679e-05,
      "loss": 0.4118,
      "step": 87
    },
    {
      "epoch": 0.1286549707602339,
      "grad_norm": 2.293775796890259,
      "learning_rate": 1.2719298245614037e-05,
      "loss": 0.4649,
      "step": 88
    },
    {
      "epoch": 0.1301169590643275,
      "grad_norm": 1.4413833618164062,
      "learning_rate": 1.2865497076023392e-05,
      "loss": 0.2874,
      "step": 89
    },
    {
      "epoch": 0.13157894736842105,
      "grad_norm": 1.6772958040237427,
      "learning_rate": 1.301169590643275e-05,
      "loss": 0.3649,
      "step": 90
    },
    {
      "epoch": 0.13304093567251463,
      "grad_norm": 1.9642162322998047,
      "learning_rate": 1.3157894736842106e-05,
      "loss": 0.3622,
      "step": 91
    },
    {
      "epoch": 0.13450292397660818,
      "grad_norm": 1.7148772478103638,
      "learning_rate": 1.3304093567251464e-05,
      "loss": 0.3246,
      "step": 92
    },
    {
      "epoch": 0.13596491228070176,
      "grad_norm": 1.841139316558838,
      "learning_rate": 1.3450292397660819e-05,
      "loss": 0.4338,
      "step": 93
    },
    {
      "epoch": 0.13742690058479531,
      "grad_norm": 1.8452824354171753,
      "learning_rate": 1.3596491228070177e-05,
      "loss": 0.349,
      "step": 94
    },
    {
      "epoch": 0.1388888888888889,
      "grad_norm": 2.06516170501709,
      "learning_rate": 1.3742690058479531e-05,
      "loss": 0.3776,
      "step": 95
    },
    {
      "epoch": 0.14035087719298245,
      "grad_norm": 1.886522889137268,
      "learning_rate": 1.388888888888889e-05,
      "loss": 0.4038,
      "step": 96
    },
    {
      "epoch": 0.14181286549707603,
      "grad_norm": 1.5818755626678467,
      "learning_rate": 1.4035087719298246e-05,
      "loss": 0.2919,
      "step": 97
    },
    {
      "epoch": 0.14327485380116958,
      "grad_norm": 1.4295921325683594,
      "learning_rate": 1.4181286549707604e-05,
      "loss": 0.2517,
      "step": 98
    },
    {
      "epoch": 0.14473684210526316,
      "grad_norm": 1.7458497285842896,
      "learning_rate": 1.4327485380116959e-05,
      "loss": 0.3907,
      "step": 99
    },
    {
      "epoch": 0.14619883040935672,
      "grad_norm": 2.005880832672119,
      "learning_rate": 1.4473684210526317e-05,
      "loss": 0.372,
      "step": 100
    },
    {
      "epoch": 0.1476608187134503,
      "grad_norm": 1.6340547800064087,
      "learning_rate": 1.4619883040935673e-05,
      "loss": 0.3137,
      "step": 101
    },
    {
      "epoch": 0.14912280701754385,
      "grad_norm": 1.4766933917999268,
      "learning_rate": 1.4766081871345031e-05,
      "loss": 0.3142,
      "step": 102
    },
    {
      "epoch": 0.15058479532163743,
      "grad_norm": 2.0684752464294434,
      "learning_rate": 1.4912280701754386e-05,
      "loss": 0.3975,
      "step": 103
    },
    {
      "epoch": 0.15204678362573099,
      "grad_norm": 1.809065580368042,
      "learning_rate": 1.5058479532163744e-05,
      "loss": 0.2827,
      "step": 104
    },
    {
      "epoch": 0.15350877192982457,
      "grad_norm": 1.4468668699264526,
      "learning_rate": 1.5204678362573099e-05,
      "loss": 0.2391,
      "step": 105
    },
    {
      "epoch": 0.15497076023391812,
      "grad_norm": 2.209812879562378,
      "learning_rate": 1.5350877192982457e-05,
      "loss": 0.4639,
      "step": 106
    },
    {
      "epoch": 0.1564327485380117,
      "grad_norm": 2.2262823581695557,
      "learning_rate": 1.5497076023391813e-05,
      "loss": 0.4266,
      "step": 107
    },
    {
      "epoch": 0.15789473684210525,
      "grad_norm": 1.9805610179901123,
      "learning_rate": 1.564327485380117e-05,
      "loss": 0.3326,
      "step": 108
    },
    {
      "epoch": 0.15935672514619884,
      "grad_norm": 1.497840404510498,
      "learning_rate": 1.5789473684210526e-05,
      "loss": 0.2294,
      "step": 109
    },
    {
      "epoch": 0.1608187134502924,
      "grad_norm": 1.9749515056610107,
      "learning_rate": 1.5935672514619886e-05,
      "loss": 0.2891,
      "step": 110
    },
    {
      "epoch": 0.16228070175438597,
      "grad_norm": 2.430293321609497,
      "learning_rate": 1.608187134502924e-05,
      "loss": 0.4273,
      "step": 111
    },
    {
      "epoch": 0.16374269005847952,
      "grad_norm": 1.9196723699569702,
      "learning_rate": 1.62280701754386e-05,
      "loss": 0.4144,
      "step": 112
    },
    {
      "epoch": 0.1652046783625731,
      "grad_norm": 2.0163681507110596,
      "learning_rate": 1.6374269005847955e-05,
      "loss": 0.3724,
      "step": 113
    },
    {
      "epoch": 0.16666666666666666,
      "grad_norm": 1.5672687292099,
      "learning_rate": 1.652046783625731e-05,
      "loss": 0.3081,
      "step": 114
    },
    {
      "epoch": 0.16812865497076024,
      "grad_norm": 2.052649974822998,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 0.2418,
      "step": 115
    },
    {
      "epoch": 0.1695906432748538,
      "grad_norm": 1.809874415397644,
      "learning_rate": 1.6812865497076024e-05,
      "loss": 0.3187,
      "step": 116
    },
    {
      "epoch": 0.17105263157894737,
      "grad_norm": 1.6516849994659424,
      "learning_rate": 1.695906432748538e-05,
      "loss": 0.3157,
      "step": 117
    },
    {
      "epoch": 0.17251461988304093,
      "grad_norm": 2.8488309383392334,
      "learning_rate": 1.7105263157894737e-05,
      "loss": 0.8204,
      "step": 118
    },
    {
      "epoch": 0.1739766081871345,
      "grad_norm": 1.6678235530853271,
      "learning_rate": 1.7251461988304093e-05,
      "loss": 0.3324,
      "step": 119
    },
    {
      "epoch": 0.17543859649122806,
      "grad_norm": 2.4188318252563477,
      "learning_rate": 1.7397660818713453e-05,
      "loss": 0.3969,
      "step": 120
    },
    {
      "epoch": 0.17690058479532164,
      "grad_norm": 1.8499585390090942,
      "learning_rate": 1.7543859649122806e-05,
      "loss": 0.2678,
      "step": 121
    },
    {
      "epoch": 0.1783625730994152,
      "grad_norm": 1.4912441968917847,
      "learning_rate": 1.7690058479532165e-05,
      "loss": 0.3079,
      "step": 122
    },
    {
      "epoch": 0.17982456140350878,
      "grad_norm": 1.5774823427200317,
      "learning_rate": 1.7836257309941522e-05,
      "loss": 0.3191,
      "step": 123
    },
    {
      "epoch": 0.18128654970760233,
      "grad_norm": 1.8139753341674805,
      "learning_rate": 1.7982456140350878e-05,
      "loss": 0.284,
      "step": 124
    },
    {
      "epoch": 0.1827485380116959,
      "grad_norm": 1.904847502708435,
      "learning_rate": 1.8128654970760235e-05,
      "loss": 0.2867,
      "step": 125
    },
    {
      "epoch": 0.18421052631578946,
      "grad_norm": 1.5972429513931274,
      "learning_rate": 1.827485380116959e-05,
      "loss": 0.2595,
      "step": 126
    },
    {
      "epoch": 0.18567251461988304,
      "grad_norm": 1.8197851181030273,
      "learning_rate": 1.8421052631578947e-05,
      "loss": 0.3642,
      "step": 127
    },
    {
      "epoch": 0.1871345029239766,
      "grad_norm": 1.9099805355072021,
      "learning_rate": 1.8567251461988304e-05,
      "loss": 0.391,
      "step": 128
    },
    {
      "epoch": 0.18859649122807018,
      "grad_norm": 2.2964887619018555,
      "learning_rate": 1.871345029239766e-05,
      "loss": 0.3504,
      "step": 129
    },
    {
      "epoch": 0.19005847953216373,
      "grad_norm": 1.8502508401870728,
      "learning_rate": 1.885964912280702e-05,
      "loss": 0.3005,
      "step": 130
    },
    {
      "epoch": 0.1915204678362573,
      "grad_norm": 1.989492654800415,
      "learning_rate": 1.9005847953216373e-05,
      "loss": 0.393,
      "step": 131
    },
    {
      "epoch": 0.19298245614035087,
      "grad_norm": 2.0604186058044434,
      "learning_rate": 1.9152046783625733e-05,
      "loss": 0.2991,
      "step": 132
    },
    {
      "epoch": 0.19444444444444445,
      "grad_norm": 1.8911117315292358,
      "learning_rate": 1.929824561403509e-05,
      "loss": 0.3842,
      "step": 133
    },
    {
      "epoch": 0.195906432748538,
      "grad_norm": 1.5201209783554077,
      "learning_rate": 1.9444444444444445e-05,
      "loss": 0.3527,
      "step": 134
    },
    {
      "epoch": 0.19736842105263158,
      "grad_norm": 2.6792094707489014,
      "learning_rate": 1.9590643274853802e-05,
      "loss": 0.4835,
      "step": 135
    },
    {
      "epoch": 0.19883040935672514,
      "grad_norm": 1.8801100254058838,
      "learning_rate": 1.9736842105263158e-05,
      "loss": 0.333,
      "step": 136
    },
    {
      "epoch": 0.20029239766081872,
      "grad_norm": 1.9986276626586914,
      "learning_rate": 1.9883040935672515e-05,
      "loss": 0.41,
      "step": 137
    },
    {
      "epoch": 0.20175438596491227,
      "grad_norm": 2.249020576477051,
      "learning_rate": 2.0029239766081874e-05,
      "loss": 0.3672,
      "step": 138
    },
    {
      "epoch": 0.20321637426900585,
      "grad_norm": 1.679406762123108,
      "learning_rate": 2.0175438596491227e-05,
      "loss": 0.3311,
      "step": 139
    },
    {
      "epoch": 0.2046783625730994,
      "grad_norm": 2.2505438327789307,
      "learning_rate": 2.0321637426900587e-05,
      "loss": 0.3312,
      "step": 140
    },
    {
      "epoch": 0.20614035087719298,
      "grad_norm": 1.78598952293396,
      "learning_rate": 2.046783625730994e-05,
      "loss": 0.3686,
      "step": 141
    },
    {
      "epoch": 0.20760233918128654,
      "grad_norm": 2.0316102504730225,
      "learning_rate": 2.06140350877193e-05,
      "loss": 0.3573,
      "step": 142
    },
    {
      "epoch": 0.20906432748538012,
      "grad_norm": 1.8309293985366821,
      "learning_rate": 2.0760233918128656e-05,
      "loss": 0.2878,
      "step": 143
    },
    {
      "epoch": 0.21052631578947367,
      "grad_norm": 1.5772371292114258,
      "learning_rate": 2.0906432748538013e-05,
      "loss": 0.257,
      "step": 144
    },
    {
      "epoch": 0.21198830409356725,
      "grad_norm": 1.4653829336166382,
      "learning_rate": 2.105263157894737e-05,
      "loss": 0.238,
      "step": 145
    },
    {
      "epoch": 0.2134502923976608,
      "grad_norm": 1.4423526525497437,
      "learning_rate": 2.1198830409356725e-05,
      "loss": 0.2228,
      "step": 146
    },
    {
      "epoch": 0.2149122807017544,
      "grad_norm": 1.665651559829712,
      "learning_rate": 2.134502923976608e-05,
      "loss": 0.342,
      "step": 147
    },
    {
      "epoch": 0.21637426900584794,
      "grad_norm": 1.5140175819396973,
      "learning_rate": 2.149122807017544e-05,
      "loss": 0.2752,
      "step": 148
    },
    {
      "epoch": 0.21783625730994152,
      "grad_norm": 1.991401195526123,
      "learning_rate": 2.1637426900584794e-05,
      "loss": 0.3925,
      "step": 149
    },
    {
      "epoch": 0.21929824561403508,
      "grad_norm": 1.882815957069397,
      "learning_rate": 2.1783625730994154e-05,
      "loss": 0.3556,
      "step": 150
    },
    {
      "epoch": 0.22076023391812866,
      "grad_norm": 1.9453823566436768,
      "learning_rate": 2.1929824561403507e-05,
      "loss": 0.3577,
      "step": 151
    },
    {
      "epoch": 0.2222222222222222,
      "grad_norm": 1.7709293365478516,
      "learning_rate": 2.2076023391812867e-05,
      "loss": 0.3289,
      "step": 152
    },
    {
      "epoch": 0.2236842105263158,
      "grad_norm": 1.55705988407135,
      "learning_rate": 2.2222222222222223e-05,
      "loss": 0.2733,
      "step": 153
    },
    {
      "epoch": 0.22514619883040934,
      "grad_norm": 2.3360848426818848,
      "learning_rate": 2.236842105263158e-05,
      "loss": 0.4267,
      "step": 154
    },
    {
      "epoch": 0.22660818713450293,
      "grad_norm": 2.1530961990356445,
      "learning_rate": 2.2514619883040936e-05,
      "loss": 0.4047,
      "step": 155
    },
    {
      "epoch": 0.22807017543859648,
      "grad_norm": 2.1304171085357666,
      "learning_rate": 2.2660818713450292e-05,
      "loss": 0.3136,
      "step": 156
    },
    {
      "epoch": 0.22953216374269006,
      "grad_norm": 1.545301079750061,
      "learning_rate": 2.280701754385965e-05,
      "loss": 0.2586,
      "step": 157
    },
    {
      "epoch": 0.2309941520467836,
      "grad_norm": 1.5622113943099976,
      "learning_rate": 2.295321637426901e-05,
      "loss": 0.3171,
      "step": 158
    },
    {
      "epoch": 0.2324561403508772,
      "grad_norm": 2.239356517791748,
      "learning_rate": 2.309941520467836e-05,
      "loss": 0.3129,
      "step": 159
    },
    {
      "epoch": 0.23391812865497075,
      "grad_norm": 1.6174793243408203,
      "learning_rate": 2.324561403508772e-05,
      "loss": 0.2842,
      "step": 160
    },
    {
      "epoch": 0.23538011695906433,
      "grad_norm": 1.8902108669281006,
      "learning_rate": 2.3391812865497074e-05,
      "loss": 0.3787,
      "step": 161
    },
    {
      "epoch": 0.23684210526315788,
      "grad_norm": 2.0892152786254883,
      "learning_rate": 2.3538011695906434e-05,
      "loss": 0.3162,
      "step": 162
    },
    {
      "epoch": 0.23830409356725146,
      "grad_norm": 1.732193112373352,
      "learning_rate": 2.368421052631579e-05,
      "loss": 0.3186,
      "step": 163
    },
    {
      "epoch": 0.23976608187134502,
      "grad_norm": 1.8098409175872803,
      "learning_rate": 2.3830409356725147e-05,
      "loss": 0.3302,
      "step": 164
    },
    {
      "epoch": 0.2412280701754386,
      "grad_norm": 1.7397595643997192,
      "learning_rate": 2.3976608187134503e-05,
      "loss": 0.3294,
      "step": 165
    },
    {
      "epoch": 0.24269005847953215,
      "grad_norm": 2.1776487827301025,
      "learning_rate": 2.412280701754386e-05,
      "loss": 0.3798,
      "step": 166
    },
    {
      "epoch": 0.24415204678362573,
      "grad_norm": 2.2244021892547607,
      "learning_rate": 2.4269005847953216e-05,
      "loss": 0.4095,
      "step": 167
    },
    {
      "epoch": 0.24561403508771928,
      "grad_norm": 1.6893987655639648,
      "learning_rate": 2.4415204678362576e-05,
      "loss": 0.2719,
      "step": 168
    },
    {
      "epoch": 0.24707602339181287,
      "grad_norm": 2.039149761199951,
      "learning_rate": 2.456140350877193e-05,
      "loss": 0.4732,
      "step": 169
    },
    {
      "epoch": 0.24853801169590642,
      "grad_norm": 1.6376155614852905,
      "learning_rate": 2.470760233918129e-05,
      "loss": 0.3169,
      "step": 170
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.7381919622421265,
      "learning_rate": 2.485380116959064e-05,
      "loss": 0.3688,
      "step": 171
    },
    {
      "epoch": 0.25146198830409355,
      "grad_norm": 1.8953245878219604,
      "learning_rate": 2.5e-05,
      "loss": 0.3869,
      "step": 172
    },
    {
      "epoch": 0.25292397660818716,
      "grad_norm": 1.5782065391540527,
      "learning_rate": 2.5146198830409358e-05,
      "loss": 0.2483,
      "step": 173
    },
    {
      "epoch": 0.2543859649122807,
      "grad_norm": 1.716516375541687,
      "learning_rate": 2.5292397660818717e-05,
      "loss": 0.3779,
      "step": 174
    },
    {
      "epoch": 0.25584795321637427,
      "grad_norm": 2.4229736328125,
      "learning_rate": 2.5438596491228074e-05,
      "loss": 0.4408,
      "step": 175
    },
    {
      "epoch": 0.2573099415204678,
      "grad_norm": 1.5032482147216797,
      "learning_rate": 2.5584795321637427e-05,
      "loss": 0.2432,
      "step": 176
    },
    {
      "epoch": 0.25877192982456143,
      "grad_norm": 1.4741259813308716,
      "learning_rate": 2.5730994152046783e-05,
      "loss": 0.2688,
      "step": 177
    },
    {
      "epoch": 0.260233918128655,
      "grad_norm": 1.6456725597381592,
      "learning_rate": 2.5877192982456143e-05,
      "loss": 0.3072,
      "step": 178
    },
    {
      "epoch": 0.26169590643274854,
      "grad_norm": 2.748579263687134,
      "learning_rate": 2.60233918128655e-05,
      "loss": 0.4332,
      "step": 179
    },
    {
      "epoch": 0.2631578947368421,
      "grad_norm": 1.934592843055725,
      "learning_rate": 2.6169590643274856e-05,
      "loss": 0.3391,
      "step": 180
    },
    {
      "epoch": 0.2646198830409357,
      "grad_norm": 1.712693452835083,
      "learning_rate": 2.6315789473684212e-05,
      "loss": 0.2309,
      "step": 181
    },
    {
      "epoch": 0.26608187134502925,
      "grad_norm": 1.576505184173584,
      "learning_rate": 2.6461988304093572e-05,
      "loss": 0.226,
      "step": 182
    },
    {
      "epoch": 0.2675438596491228,
      "grad_norm": 1.5628488063812256,
      "learning_rate": 2.6608187134502928e-05,
      "loss": 0.2414,
      "step": 183
    },
    {
      "epoch": 0.26900584795321636,
      "grad_norm": 1.5298950672149658,
      "learning_rate": 2.675438596491228e-05,
      "loss": 0.3546,
      "step": 184
    },
    {
      "epoch": 0.27046783625730997,
      "grad_norm": 1.9669793844223022,
      "learning_rate": 2.6900584795321637e-05,
      "loss": 0.314,
      "step": 185
    },
    {
      "epoch": 0.2719298245614035,
      "grad_norm": 1.9723678827285767,
      "learning_rate": 2.7046783625730997e-05,
      "loss": 0.3389,
      "step": 186
    },
    {
      "epoch": 0.2733918128654971,
      "grad_norm": 2.0154645442962646,
      "learning_rate": 2.7192982456140354e-05,
      "loss": 0.305,
      "step": 187
    },
    {
      "epoch": 0.27485380116959063,
      "grad_norm": 2.423405170440674,
      "learning_rate": 2.733918128654971e-05,
      "loss": 0.4848,
      "step": 188
    },
    {
      "epoch": 0.27631578947368424,
      "grad_norm": 1.7974543571472168,
      "learning_rate": 2.7485380116959063e-05,
      "loss": 0.3611,
      "step": 189
    },
    {
      "epoch": 0.2777777777777778,
      "grad_norm": 1.810512661933899,
      "learning_rate": 2.7631578947368426e-05,
      "loss": 0.2957,
      "step": 190
    },
    {
      "epoch": 0.27923976608187134,
      "grad_norm": 2.8855981826782227,
      "learning_rate": 2.777777777777778e-05,
      "loss": 0.2873,
      "step": 191
    },
    {
      "epoch": 0.2807017543859649,
      "grad_norm": 1.9867562055587769,
      "learning_rate": 2.7923976608187135e-05,
      "loss": 0.3715,
      "step": 192
    },
    {
      "epoch": 0.2821637426900585,
      "grad_norm": 2.1393628120422363,
      "learning_rate": 2.8070175438596492e-05,
      "loss": 0.4265,
      "step": 193
    },
    {
      "epoch": 0.28362573099415206,
      "grad_norm": 2.435749053955078,
      "learning_rate": 2.821637426900585e-05,
      "loss": 0.5519,
      "step": 194
    },
    {
      "epoch": 0.2850877192982456,
      "grad_norm": 2.097238779067993,
      "learning_rate": 2.8362573099415208e-05,
      "loss": 0.3412,
      "step": 195
    },
    {
      "epoch": 0.28654970760233917,
      "grad_norm": 2.1815011501312256,
      "learning_rate": 2.850877192982456e-05,
      "loss": 0.2972,
      "step": 196
    },
    {
      "epoch": 0.2880116959064328,
      "grad_norm": 2.151921033859253,
      "learning_rate": 2.8654970760233917e-05,
      "loss": 0.4724,
      "step": 197
    },
    {
      "epoch": 0.2894736842105263,
      "grad_norm": 1.9080790281295776,
      "learning_rate": 2.8801169590643277e-05,
      "loss": 0.4011,
      "step": 198
    },
    {
      "epoch": 0.2909356725146199,
      "grad_norm": 1.7844010591506958,
      "learning_rate": 2.8947368421052634e-05,
      "loss": 0.349,
      "step": 199
    },
    {
      "epoch": 0.29239766081871343,
      "grad_norm": 1.4919390678405762,
      "learning_rate": 2.909356725146199e-05,
      "loss": 0.254,
      "step": 200
    },
    {
      "epoch": 0.29385964912280704,
      "grad_norm": 1.8354111909866333,
      "learning_rate": 2.9239766081871346e-05,
      "loss": 0.4328,
      "step": 201
    },
    {
      "epoch": 0.2953216374269006,
      "grad_norm": 1.5062732696533203,
      "learning_rate": 2.9385964912280706e-05,
      "loss": 0.302,
      "step": 202
    },
    {
      "epoch": 0.29678362573099415,
      "grad_norm": 1.7705012559890747,
      "learning_rate": 2.9532163742690062e-05,
      "loss": 0.3333,
      "step": 203
    },
    {
      "epoch": 0.2982456140350877,
      "grad_norm": 2.125746488571167,
      "learning_rate": 2.9678362573099415e-05,
      "loss": 0.3358,
      "step": 204
    },
    {
      "epoch": 0.2997076023391813,
      "grad_norm": 2.0204596519470215,
      "learning_rate": 2.9824561403508772e-05,
      "loss": 0.4405,
      "step": 205
    },
    {
      "epoch": 0.30116959064327486,
      "grad_norm": 1.980130910873413,
      "learning_rate": 2.997076023391813e-05,
      "loss": 0.3859,
      "step": 206
    },
    {
      "epoch": 0.3026315789473684,
      "grad_norm": 1.4389185905456543,
      "learning_rate": 3.0116959064327488e-05,
      "loss": 0.3301,
      "step": 207
    },
    {
      "epoch": 0.30409356725146197,
      "grad_norm": 2.386847972869873,
      "learning_rate": 3.0263157894736844e-05,
      "loss": 0.4717,
      "step": 208
    },
    {
      "epoch": 0.3055555555555556,
      "grad_norm": 2.0449209213256836,
      "learning_rate": 3.0409356725146197e-05,
      "loss": 0.4165,
      "step": 209
    },
    {
      "epoch": 0.30701754385964913,
      "grad_norm": 1.7676043510437012,
      "learning_rate": 3.055555555555556e-05,
      "loss": 0.3402,
      "step": 210
    },
    {
      "epoch": 0.3084795321637427,
      "grad_norm": 1.8220148086547852,
      "learning_rate": 3.0701754385964913e-05,
      "loss": 0.3292,
      "step": 211
    },
    {
      "epoch": 0.30994152046783624,
      "grad_norm": 2.1669318675994873,
      "learning_rate": 3.084795321637427e-05,
      "loss": 0.423,
      "step": 212
    },
    {
      "epoch": 0.31140350877192985,
      "grad_norm": 1.9490599632263184,
      "learning_rate": 3.0994152046783626e-05,
      "loss": 0.3466,
      "step": 213
    },
    {
      "epoch": 0.3128654970760234,
      "grad_norm": 1.8736350536346436,
      "learning_rate": 3.1140350877192986e-05,
      "loss": 0.356,
      "step": 214
    },
    {
      "epoch": 0.31432748538011696,
      "grad_norm": 1.8510347604751587,
      "learning_rate": 3.128654970760234e-05,
      "loss": 0.372,
      "step": 215
    },
    {
      "epoch": 0.3157894736842105,
      "grad_norm": 1.6804232597351074,
      "learning_rate": 3.14327485380117e-05,
      "loss": 0.3016,
      "step": 216
    },
    {
      "epoch": 0.3172514619883041,
      "grad_norm": 1.6097888946533203,
      "learning_rate": 3.157894736842105e-05,
      "loss": 0.2383,
      "step": 217
    },
    {
      "epoch": 0.31871345029239767,
      "grad_norm": 2.6004140377044678,
      "learning_rate": 3.172514619883041e-05,
      "loss": 0.5164,
      "step": 218
    },
    {
      "epoch": 0.3201754385964912,
      "grad_norm": 1.973547101020813,
      "learning_rate": 3.187134502923977e-05,
      "loss": 0.3614,
      "step": 219
    },
    {
      "epoch": 0.3216374269005848,
      "grad_norm": 3.3534610271453857,
      "learning_rate": 3.2017543859649124e-05,
      "loss": 0.4569,
      "step": 220
    },
    {
      "epoch": 0.3230994152046784,
      "grad_norm": 1.677751898765564,
      "learning_rate": 3.216374269005848e-05,
      "loss": 0.2575,
      "step": 221
    },
    {
      "epoch": 0.32456140350877194,
      "grad_norm": 1.71629798412323,
      "learning_rate": 3.230994152046784e-05,
      "loss": 0.3216,
      "step": 222
    },
    {
      "epoch": 0.3260233918128655,
      "grad_norm": 2.218287467956543,
      "learning_rate": 3.24561403508772e-05,
      "loss": 0.3825,
      "step": 223
    },
    {
      "epoch": 0.32748538011695905,
      "grad_norm": 1.6495413780212402,
      "learning_rate": 3.260233918128655e-05,
      "loss": 0.3544,
      "step": 224
    },
    {
      "epoch": 0.32894736842105265,
      "grad_norm": 2.727339744567871,
      "learning_rate": 3.274853801169591e-05,
      "loss": 0.4822,
      "step": 225
    },
    {
      "epoch": 0.3304093567251462,
      "grad_norm": 2.028611660003662,
      "learning_rate": 3.289473684210527e-05,
      "loss": 0.3344,
      "step": 226
    },
    {
      "epoch": 0.33187134502923976,
      "grad_norm": 1.6433840990066528,
      "learning_rate": 3.304093567251462e-05,
      "loss": 0.2784,
      "step": 227
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 2.251619815826416,
      "learning_rate": 3.3187134502923975e-05,
      "loss": 0.416,
      "step": 228
    },
    {
      "epoch": 0.3347953216374269,
      "grad_norm": 1.9172239303588867,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 0.2786,
      "step": 229
    },
    {
      "epoch": 0.3362573099415205,
      "grad_norm": 2.3143632411956787,
      "learning_rate": 3.3479532163742695e-05,
      "loss": 0.3497,
      "step": 230
    },
    {
      "epoch": 0.33771929824561403,
      "grad_norm": 1.984010934829712,
      "learning_rate": 3.362573099415205e-05,
      "loss": 0.2244,
      "step": 231
    },
    {
      "epoch": 0.3391812865497076,
      "grad_norm": 1.8638683557510376,
      "learning_rate": 3.377192982456141e-05,
      "loss": 0.4262,
      "step": 232
    },
    {
      "epoch": 0.3406432748538012,
      "grad_norm": 1.868976354598999,
      "learning_rate": 3.391812865497076e-05,
      "loss": 0.4227,
      "step": 233
    },
    {
      "epoch": 0.34210526315789475,
      "grad_norm": 1.8778854608535767,
      "learning_rate": 3.406432748538012e-05,
      "loss": 0.355,
      "step": 234
    },
    {
      "epoch": 0.3435672514619883,
      "grad_norm": 2.503788471221924,
      "learning_rate": 3.421052631578947e-05,
      "loss": 0.5787,
      "step": 235
    },
    {
      "epoch": 0.34502923976608185,
      "grad_norm": 1.6835317611694336,
      "learning_rate": 3.435672514619883e-05,
      "loss": 0.2707,
      "step": 236
    },
    {
      "epoch": 0.34649122807017546,
      "grad_norm": 2.123242139816284,
      "learning_rate": 3.4502923976608186e-05,
      "loss": 0.4741,
      "step": 237
    },
    {
      "epoch": 0.347953216374269,
      "grad_norm": 1.3670194149017334,
      "learning_rate": 3.4649122807017546e-05,
      "loss": 0.2723,
      "step": 238
    },
    {
      "epoch": 0.34941520467836257,
      "grad_norm": 1.939557671546936,
      "learning_rate": 3.4795321637426905e-05,
      "loss": 0.2941,
      "step": 239
    },
    {
      "epoch": 0.3508771929824561,
      "grad_norm": 1.5702491998672485,
      "learning_rate": 3.494152046783626e-05,
      "loss": 0.346,
      "step": 240
    },
    {
      "epoch": 0.35233918128654973,
      "grad_norm": 1.7520431280136108,
      "learning_rate": 3.508771929824561e-05,
      "loss": 0.3362,
      "step": 241
    },
    {
      "epoch": 0.3538011695906433,
      "grad_norm": 1.3005727529525757,
      "learning_rate": 3.523391812865498e-05,
      "loss": 0.2198,
      "step": 242
    },
    {
      "epoch": 0.35526315789473684,
      "grad_norm": 2.0419647693634033,
      "learning_rate": 3.538011695906433e-05,
      "loss": 0.2627,
      "step": 243
    },
    {
      "epoch": 0.3567251461988304,
      "grad_norm": 1.8182775974273682,
      "learning_rate": 3.5526315789473684e-05,
      "loss": 0.3093,
      "step": 244
    },
    {
      "epoch": 0.358187134502924,
      "grad_norm": 1.592496395111084,
      "learning_rate": 3.5672514619883044e-05,
      "loss": 0.2943,
      "step": 245
    },
    {
      "epoch": 0.35964912280701755,
      "grad_norm": 1.6161870956420898,
      "learning_rate": 3.5818713450292403e-05,
      "loss": 0.3118,
      "step": 246
    },
    {
      "epoch": 0.3611111111111111,
      "grad_norm": 2.633676290512085,
      "learning_rate": 3.5964912280701756e-05,
      "loss": 0.3927,
      "step": 247
    },
    {
      "epoch": 0.36257309941520466,
      "grad_norm": 2.1901743412017822,
      "learning_rate": 3.611111111111111e-05,
      "loss": 0.4944,
      "step": 248
    },
    {
      "epoch": 0.36403508771929827,
      "grad_norm": 2.667672872543335,
      "learning_rate": 3.625730994152047e-05,
      "loss": 0.4855,
      "step": 249
    },
    {
      "epoch": 0.3654970760233918,
      "grad_norm": 1.9923423528671265,
      "learning_rate": 3.640350877192983e-05,
      "loss": 0.372,
      "step": 250
    },
    {
      "epoch": 0.3669590643274854,
      "grad_norm": 2.3821113109588623,
      "learning_rate": 3.654970760233918e-05,
      "loss": 0.3471,
      "step": 251
    },
    {
      "epoch": 0.3684210526315789,
      "grad_norm": 1.5223642587661743,
      "learning_rate": 3.669590643274854e-05,
      "loss": 0.3728,
      "step": 252
    },
    {
      "epoch": 0.36988304093567254,
      "grad_norm": 1.6055282354354858,
      "learning_rate": 3.6842105263157895e-05,
      "loss": 0.2399,
      "step": 253
    },
    {
      "epoch": 0.3713450292397661,
      "grad_norm": 2.2257461547851562,
      "learning_rate": 3.6988304093567254e-05,
      "loss": 0.4523,
      "step": 254
    },
    {
      "epoch": 0.37280701754385964,
      "grad_norm": 2.265007257461548,
      "learning_rate": 3.713450292397661e-05,
      "loss": 0.4584,
      "step": 255
    },
    {
      "epoch": 0.3742690058479532,
      "grad_norm": 1.7679740190505981,
      "learning_rate": 3.728070175438597e-05,
      "loss": 0.3462,
      "step": 256
    },
    {
      "epoch": 0.3757309941520468,
      "grad_norm": 1.6778432130813599,
      "learning_rate": 3.742690058479532e-05,
      "loss": 0.3014,
      "step": 257
    },
    {
      "epoch": 0.37719298245614036,
      "grad_norm": 1.953843355178833,
      "learning_rate": 3.757309941520468e-05,
      "loss": 0.5143,
      "step": 258
    },
    {
      "epoch": 0.3786549707602339,
      "grad_norm": 1.726922869682312,
      "learning_rate": 3.771929824561404e-05,
      "loss": 0.3495,
      "step": 259
    },
    {
      "epoch": 0.38011695906432746,
      "grad_norm": 1.7834779024124146,
      "learning_rate": 3.786549707602339e-05,
      "loss": 0.3608,
      "step": 260
    },
    {
      "epoch": 0.3815789473684211,
      "grad_norm": 1.866376280784607,
      "learning_rate": 3.8011695906432746e-05,
      "loss": 0.3882,
      "step": 261
    },
    {
      "epoch": 0.3830409356725146,
      "grad_norm": 1.5821527242660522,
      "learning_rate": 3.815789473684211e-05,
      "loss": 0.3464,
      "step": 262
    },
    {
      "epoch": 0.3845029239766082,
      "grad_norm": 1.9589916467666626,
      "learning_rate": 3.8304093567251465e-05,
      "loss": 0.3278,
      "step": 263
    },
    {
      "epoch": 0.38596491228070173,
      "grad_norm": 1.7693442106246948,
      "learning_rate": 3.845029239766082e-05,
      "loss": 0.3691,
      "step": 264
    },
    {
      "epoch": 0.38742690058479534,
      "grad_norm": 1.71920645236969,
      "learning_rate": 3.859649122807018e-05,
      "loss": 0.3383,
      "step": 265
    },
    {
      "epoch": 0.3888888888888889,
      "grad_norm": 2.1875786781311035,
      "learning_rate": 3.874269005847954e-05,
      "loss": 0.45,
      "step": 266
    },
    {
      "epoch": 0.39035087719298245,
      "grad_norm": 2.0212056636810303,
      "learning_rate": 3.888888888888889e-05,
      "loss": 0.3146,
      "step": 267
    },
    {
      "epoch": 0.391812865497076,
      "grad_norm": 1.5974324941635132,
      "learning_rate": 3.9035087719298244e-05,
      "loss": 0.3886,
      "step": 268
    },
    {
      "epoch": 0.3932748538011696,
      "grad_norm": 1.5394872426986694,
      "learning_rate": 3.9181286549707604e-05,
      "loss": 0.2601,
      "step": 269
    },
    {
      "epoch": 0.39473684210526316,
      "grad_norm": 2.1220672130584717,
      "learning_rate": 3.932748538011696e-05,
      "loss": 0.3592,
      "step": 270
    },
    {
      "epoch": 0.3961988304093567,
      "grad_norm": 2.2934184074401855,
      "learning_rate": 3.9473684210526316e-05,
      "loss": 0.4686,
      "step": 271
    },
    {
      "epoch": 0.39766081871345027,
      "grad_norm": 2.3653199672698975,
      "learning_rate": 3.9619883040935676e-05,
      "loss": 0.5325,
      "step": 272
    },
    {
      "epoch": 0.3991228070175439,
      "grad_norm": 2.072711706161499,
      "learning_rate": 3.976608187134503e-05,
      "loss": 0.4735,
      "step": 273
    },
    {
      "epoch": 0.40058479532163743,
      "grad_norm": 1.5149919986724854,
      "learning_rate": 3.991228070175439e-05,
      "loss": 0.2688,
      "step": 274
    },
    {
      "epoch": 0.402046783625731,
      "grad_norm": 1.963314414024353,
      "learning_rate": 4.005847953216375e-05,
      "loss": 0.3744,
      "step": 275
    },
    {
      "epoch": 0.40350877192982454,
      "grad_norm": 2.0859949588775635,
      "learning_rate": 4.02046783625731e-05,
      "loss": 0.3925,
      "step": 276
    },
    {
      "epoch": 0.40497076023391815,
      "grad_norm": 1.7795841693878174,
      "learning_rate": 4.0350877192982455e-05,
      "loss": 0.4322,
      "step": 277
    },
    {
      "epoch": 0.4064327485380117,
      "grad_norm": 1.928659439086914,
      "learning_rate": 4.0497076023391814e-05,
      "loss": 0.3528,
      "step": 278
    },
    {
      "epoch": 0.40789473684210525,
      "grad_norm": 1.6361182928085327,
      "learning_rate": 4.0643274853801174e-05,
      "loss": 0.2749,
      "step": 279
    },
    {
      "epoch": 0.4093567251461988,
      "grad_norm": 1.4732563495635986,
      "learning_rate": 4.078947368421053e-05,
      "loss": 0.2708,
      "step": 280
    },
    {
      "epoch": 0.4108187134502924,
      "grad_norm": 2.180634021759033,
      "learning_rate": 4.093567251461988e-05,
      "loss": 0.4575,
      "step": 281
    },
    {
      "epoch": 0.41228070175438597,
      "grad_norm": 2.4502696990966797,
      "learning_rate": 4.1081871345029247e-05,
      "loss": 0.4141,
      "step": 282
    },
    {
      "epoch": 0.4137426900584795,
      "grad_norm": 2.5899109840393066,
      "learning_rate": 4.12280701754386e-05,
      "loss": 0.5448,
      "step": 283
    },
    {
      "epoch": 0.4152046783625731,
      "grad_norm": 1.8651745319366455,
      "learning_rate": 4.137426900584795e-05,
      "loss": 0.4004,
      "step": 284
    },
    {
      "epoch": 0.4166666666666667,
      "grad_norm": 1.8774100542068481,
      "learning_rate": 4.152046783625731e-05,
      "loss": 0.4367,
      "step": 285
    },
    {
      "epoch": 0.41812865497076024,
      "grad_norm": 2.0152692794799805,
      "learning_rate": 4.166666666666667e-05,
      "loss": 0.3619,
      "step": 286
    },
    {
      "epoch": 0.4195906432748538,
      "grad_norm": 1.7969722747802734,
      "learning_rate": 4.1812865497076025e-05,
      "loss": 0.3515,
      "step": 287
    },
    {
      "epoch": 0.42105263157894735,
      "grad_norm": 1.9542049169540405,
      "learning_rate": 4.195906432748538e-05,
      "loss": 0.3446,
      "step": 288
    },
    {
      "epoch": 0.42251461988304095,
      "grad_norm": 1.8978732824325562,
      "learning_rate": 4.210526315789474e-05,
      "loss": 0.2951,
      "step": 289
    },
    {
      "epoch": 0.4239766081871345,
      "grad_norm": 2.6646249294281006,
      "learning_rate": 4.22514619883041e-05,
      "loss": 0.6275,
      "step": 290
    },
    {
      "epoch": 0.42543859649122806,
      "grad_norm": 2.3016018867492676,
      "learning_rate": 4.239766081871345e-05,
      "loss": 0.3165,
      "step": 291
    },
    {
      "epoch": 0.4269005847953216,
      "grad_norm": 1.3937578201293945,
      "learning_rate": 4.254385964912281e-05,
      "loss": 0.3096,
      "step": 292
    },
    {
      "epoch": 0.4283625730994152,
      "grad_norm": 2.069260597229004,
      "learning_rate": 4.269005847953216e-05,
      "loss": 0.5325,
      "step": 293
    },
    {
      "epoch": 0.4298245614035088,
      "grad_norm": 2.3702552318573,
      "learning_rate": 4.283625730994152e-05,
      "loss": 0.4822,
      "step": 294
    },
    {
      "epoch": 0.43128654970760233,
      "grad_norm": 1.6650553941726685,
      "learning_rate": 4.298245614035088e-05,
      "loss": 0.4216,
      "step": 295
    },
    {
      "epoch": 0.4327485380116959,
      "grad_norm": 1.7805320024490356,
      "learning_rate": 4.3128654970760236e-05,
      "loss": 0.3329,
      "step": 296
    },
    {
      "epoch": 0.4342105263157895,
      "grad_norm": 2.095808267593384,
      "learning_rate": 4.327485380116959e-05,
      "loss": 0.4621,
      "step": 297
    },
    {
      "epoch": 0.43567251461988304,
      "grad_norm": 2.1446943283081055,
      "learning_rate": 4.342105263157895e-05,
      "loss": 0.3277,
      "step": 298
    },
    {
      "epoch": 0.4371345029239766,
      "grad_norm": 1.7880353927612305,
      "learning_rate": 4.356725146198831e-05,
      "loss": 0.5058,
      "step": 299
    },
    {
      "epoch": 0.43859649122807015,
      "grad_norm": 1.5456608533859253,
      "learning_rate": 4.371345029239766e-05,
      "loss": 0.295,
      "step": 300
    },
    {
      "epoch": 0.44005847953216376,
      "grad_norm": 1.6270560026168823,
      "learning_rate": 4.3859649122807014e-05,
      "loss": 0.3263,
      "step": 301
    },
    {
      "epoch": 0.4415204678362573,
      "grad_norm": 1.6269253492355347,
      "learning_rate": 4.400584795321638e-05,
      "loss": 0.2614,
      "step": 302
    },
    {
      "epoch": 0.44298245614035087,
      "grad_norm": 2.0793192386627197,
      "learning_rate": 4.4152046783625734e-05,
      "loss": 0.3969,
      "step": 303
    },
    {
      "epoch": 0.4444444444444444,
      "grad_norm": 2.2050130367279053,
      "learning_rate": 4.429824561403509e-05,
      "loss": 0.3631,
      "step": 304
    },
    {
      "epoch": 0.44590643274853803,
      "grad_norm": 1.6470381021499634,
      "learning_rate": 4.4444444444444447e-05,
      "loss": 0.3681,
      "step": 305
    },
    {
      "epoch": 0.4473684210526316,
      "grad_norm": 1.7928194999694824,
      "learning_rate": 4.4590643274853806e-05,
      "loss": 0.4091,
      "step": 306
    },
    {
      "epoch": 0.44883040935672514,
      "grad_norm": 2.948406934738159,
      "learning_rate": 4.473684210526316e-05,
      "loss": 0.3836,
      "step": 307
    },
    {
      "epoch": 0.4502923976608187,
      "grad_norm": 2.0245373249053955,
      "learning_rate": 4.488304093567251e-05,
      "loss": 0.3924,
      "step": 308
    },
    {
      "epoch": 0.4517543859649123,
      "grad_norm": 1.6427713632583618,
      "learning_rate": 4.502923976608187e-05,
      "loss": 0.3414,
      "step": 309
    },
    {
      "epoch": 0.45321637426900585,
      "grad_norm": 2.387951374053955,
      "learning_rate": 4.517543859649123e-05,
      "loss": 0.5332,
      "step": 310
    },
    {
      "epoch": 0.4546783625730994,
      "grad_norm": 1.7910897731781006,
      "learning_rate": 4.5321637426900585e-05,
      "loss": 0.3698,
      "step": 311
    },
    {
      "epoch": 0.45614035087719296,
      "grad_norm": 2.0086562633514404,
      "learning_rate": 4.5467836257309945e-05,
      "loss": 0.3384,
      "step": 312
    },
    {
      "epoch": 0.45760233918128657,
      "grad_norm": 2.6067330837249756,
      "learning_rate": 4.56140350877193e-05,
      "loss": 0.3492,
      "step": 313
    },
    {
      "epoch": 0.4590643274853801,
      "grad_norm": 1.7646232843399048,
      "learning_rate": 4.576023391812866e-05,
      "loss": 0.4478,
      "step": 314
    },
    {
      "epoch": 0.4605263157894737,
      "grad_norm": 1.8665695190429688,
      "learning_rate": 4.590643274853802e-05,
      "loss": 0.4507,
      "step": 315
    },
    {
      "epoch": 0.4619883040935672,
      "grad_norm": 1.7864433526992798,
      "learning_rate": 4.605263157894737e-05,
      "loss": 0.3681,
      "step": 316
    },
    {
      "epoch": 0.46345029239766083,
      "grad_norm": 1.9625555276870728,
      "learning_rate": 4.619883040935672e-05,
      "loss": 0.2751,
      "step": 317
    },
    {
      "epoch": 0.4649122807017544,
      "grad_norm": 1.8124951124191284,
      "learning_rate": 4.634502923976608e-05,
      "loss": 0.4775,
      "step": 318
    },
    {
      "epoch": 0.46637426900584794,
      "grad_norm": 1.4325212240219116,
      "learning_rate": 4.649122807017544e-05,
      "loss": 0.2601,
      "step": 319
    },
    {
      "epoch": 0.4678362573099415,
      "grad_norm": 1.5628498792648315,
      "learning_rate": 4.6637426900584796e-05,
      "loss": 0.3159,
      "step": 320
    },
    {
      "epoch": 0.4692982456140351,
      "grad_norm": 1.7195894718170166,
      "learning_rate": 4.678362573099415e-05,
      "loss": 0.3473,
      "step": 321
    },
    {
      "epoch": 0.47076023391812866,
      "grad_norm": 1.5737459659576416,
      "learning_rate": 4.6929824561403515e-05,
      "loss": 0.316,
      "step": 322
    },
    {
      "epoch": 0.4722222222222222,
      "grad_norm": 1.5952339172363281,
      "learning_rate": 4.707602339181287e-05,
      "loss": 0.3855,
      "step": 323
    },
    {
      "epoch": 0.47368421052631576,
      "grad_norm": 1.8633426427841187,
      "learning_rate": 4.722222222222222e-05,
      "loss": 0.272,
      "step": 324
    },
    {
      "epoch": 0.47514619883040937,
      "grad_norm": 1.8762574195861816,
      "learning_rate": 4.736842105263158e-05,
      "loss": 0.3793,
      "step": 325
    },
    {
      "epoch": 0.4766081871345029,
      "grad_norm": 2.399427652359009,
      "learning_rate": 4.751461988304094e-05,
      "loss": 0.519,
      "step": 326
    },
    {
      "epoch": 0.4780701754385965,
      "grad_norm": 1.8337801694869995,
      "learning_rate": 4.7660818713450294e-05,
      "loss": 0.4216,
      "step": 327
    },
    {
      "epoch": 0.47953216374269003,
      "grad_norm": 2.2894349098205566,
      "learning_rate": 4.780701754385965e-05,
      "loss": 0.523,
      "step": 328
    },
    {
      "epoch": 0.48099415204678364,
      "grad_norm": 2.033219814300537,
      "learning_rate": 4.7953216374269006e-05,
      "loss": 0.3586,
      "step": 329
    },
    {
      "epoch": 0.4824561403508772,
      "grad_norm": 1.827260136604309,
      "learning_rate": 4.8099415204678366e-05,
      "loss": 0.4258,
      "step": 330
    },
    {
      "epoch": 0.48391812865497075,
      "grad_norm": 1.9719338417053223,
      "learning_rate": 4.824561403508772e-05,
      "loss": 0.4291,
      "step": 331
    },
    {
      "epoch": 0.4853801169590643,
      "grad_norm": 2.074366569519043,
      "learning_rate": 4.839181286549708e-05,
      "loss": 0.4074,
      "step": 332
    },
    {
      "epoch": 0.4868421052631579,
      "grad_norm": 2.5167973041534424,
      "learning_rate": 4.853801169590643e-05,
      "loss": 0.5734,
      "step": 333
    },
    {
      "epoch": 0.48830409356725146,
      "grad_norm": 1.715615153312683,
      "learning_rate": 4.868421052631579e-05,
      "loss": 0.3627,
      "step": 334
    },
    {
      "epoch": 0.489766081871345,
      "grad_norm": 1.6657428741455078,
      "learning_rate": 4.883040935672515e-05,
      "loss": 0.3648,
      "step": 335
    },
    {
      "epoch": 0.49122807017543857,
      "grad_norm": 2.268401622772217,
      "learning_rate": 4.8976608187134504e-05,
      "loss": 0.43,
      "step": 336
    },
    {
      "epoch": 0.4926900584795322,
      "grad_norm": 1.8155171871185303,
      "learning_rate": 4.912280701754386e-05,
      "loss": 0.4696,
      "step": 337
    },
    {
      "epoch": 0.49415204678362573,
      "grad_norm": 1.8868937492370605,
      "learning_rate": 4.926900584795322e-05,
      "loss": 0.4021,
      "step": 338
    },
    {
      "epoch": 0.4956140350877193,
      "grad_norm": 1.800012469291687,
      "learning_rate": 4.941520467836258e-05,
      "loss": 0.3976,
      "step": 339
    },
    {
      "epoch": 0.49707602339181284,
      "grad_norm": 1.8147532939910889,
| "learning_rate": 4.956140350877193e-05, | |
| "loss": 0.4306, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.49853801169590645, | |
| "grad_norm": 1.6838949918746948, | |
| "learning_rate": 4.970760233918128e-05, | |
| "loss": 0.3577, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 1.5661842823028564, | |
| "learning_rate": 4.985380116959065e-05, | |
| "loss": 0.3152, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 0.5014619883040936, | |
| "grad_norm": 1.998780608177185, | |
| "learning_rate": 5e-05, | |
| "loss": 0.3999, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 0.5029239766081871, | |
| "grad_norm": 2.1736700534820557, | |
| "learning_rate": 4.9999986978155956e-05, | |
| "loss": 0.4295, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 0.5043859649122807, | |
| "grad_norm": 1.489250898361206, | |
| "learning_rate": 4.999994791263739e-05, | |
| "loss": 0.3144, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 0.5058479532163743, | |
| "grad_norm": 1.9218119382858276, | |
| "learning_rate": 4.9999882803485004e-05, | |
| "loss": 0.4255, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 0.5073099415204678, | |
| "grad_norm": 2.5860095024108887, | |
| "learning_rate": 4.9999791650766615e-05, | |
| "loss": 0.5971, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 0.5087719298245614, | |
| "grad_norm": 1.5442886352539062, | |
| "learning_rate": 4.999967445457718e-05, | |
| "loss": 0.3232, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 0.5102339181286549, | |
| "grad_norm": 1.8710730075836182, | |
| "learning_rate": 4.999953121503881e-05, | |
| "loss": 0.4056, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 0.5116959064327485, | |
| "grad_norm": 1.7341574430465698, | |
| "learning_rate": 4.99993619323007e-05, | |
| "loss": 0.3727, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.5131578947368421, | |
| "grad_norm": 1.9389700889587402, | |
| "learning_rate": 4.999916660653921e-05, | |
| "loss": 0.3684, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 0.5146198830409356, | |
| "grad_norm": 2.749030351638794, | |
| "learning_rate": 4.999894523795781e-05, | |
| "loss": 0.5419, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 0.5160818713450293, | |
| "grad_norm": 1.8607683181762695, | |
| "learning_rate": 4.999869782678712e-05, | |
| "loss": 0.3614, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 0.5175438596491229, | |
| "grad_norm": 1.6071836948394775, | |
| "learning_rate": 4.999842437328489e-05, | |
| "loss": 0.3015, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 0.5190058479532164, | |
| "grad_norm": 1.7129038572311401, | |
| "learning_rate": 4.999812487773597e-05, | |
| "loss": 0.3431, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 0.52046783625731, | |
| "grad_norm": 1.9856266975402832, | |
| "learning_rate": 4.9997799340452365e-05, | |
| "loss": 0.365, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 0.5219298245614035, | |
| "grad_norm": 1.5447169542312622, | |
| "learning_rate": 4.999744776177321e-05, | |
| "loss": 0.3747, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 0.5233918128654971, | |
| "grad_norm": 2.341623067855835, | |
| "learning_rate": 4.999707014206475e-05, | |
| "loss": 0.4512, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 0.5248538011695907, | |
| "grad_norm": 1.6096324920654297, | |
| "learning_rate": 4.9996666481720386e-05, | |
| "loss": 0.3394, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 0.5263157894736842, | |
| "grad_norm": 1.744593620300293, | |
| "learning_rate": 4.999623678116061e-05, | |
| "loss": 0.28, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.5277777777777778, | |
| "grad_norm": 3.5207672119140625, | |
| "learning_rate": 4.999578104083307e-05, | |
| "loss": 0.8074, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 0.5292397660818714, | |
| "grad_norm": 2.0043256282806396, | |
| "learning_rate": 4.9995299261212536e-05, | |
| "loss": 0.4611, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 0.5307017543859649, | |
| "grad_norm": 2.1335196495056152, | |
| "learning_rate": 4.99947914428009e-05, | |
| "loss": 0.4772, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 0.5321637426900585, | |
| "grad_norm": 2.2432262897491455, | |
| "learning_rate": 4.9994257586127175e-05, | |
| "loss": 0.3972, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 0.533625730994152, | |
| "grad_norm": 1.7606252431869507, | |
| "learning_rate": 4.999369769174751e-05, | |
| "loss": 0.4233, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.5350877192982456, | |
| "grad_norm": 1.7537559270858765, | |
| "learning_rate": 4.999311176024517e-05, | |
| "loss": 0.3685, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 0.5365497076023392, | |
| "grad_norm": 2.2795956134796143, | |
| "learning_rate": 4.999249979223055e-05, | |
| "loss": 0.4321, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 0.5380116959064327, | |
| "grad_norm": 1.7395251989364624, | |
| "learning_rate": 4.9991861788341165e-05, | |
| "loss": 0.4683, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 0.5394736842105263, | |
| "grad_norm": 2.5909295082092285, | |
| "learning_rate": 4.999119774924166e-05, | |
| "loss": 0.4694, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 0.5409356725146199, | |
| "grad_norm": 1.6204602718353271, | |
| "learning_rate": 4.999050767562379e-05, | |
| "loss": 0.3199, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.5423976608187134, | |
| "grad_norm": 2.1818158626556396, | |
| "learning_rate": 4.9989791568206434e-05, | |
| "loss": 0.4531, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 0.543859649122807, | |
| "grad_norm": 1.8216023445129395, | |
| "learning_rate": 4.99890494277356e-05, | |
| "loss": 0.278, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 0.5453216374269005, | |
| "grad_norm": 2.3476250171661377, | |
| "learning_rate": 4.9988281254984414e-05, | |
| "loss": 0.5406, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 0.5467836257309941, | |
| "grad_norm": 1.9644140005111694, | |
| "learning_rate": 4.998748705075311e-05, | |
| "loss": 0.466, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 0.5482456140350878, | |
| "grad_norm": 1.4741235971450806, | |
| "learning_rate": 4.9986666815869054e-05, | |
| "loss": 0.2702, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.5497076023391813, | |
| "grad_norm": 1.9738681316375732, | |
| "learning_rate": 4.998582055118672e-05, | |
| "loss": 0.3656, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 0.5511695906432749, | |
| "grad_norm": 1.780532956123352, | |
| "learning_rate": 4.998494825758771e-05, | |
| "loss": 0.3667, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 0.5526315789473685, | |
| "grad_norm": 1.9166702032089233, | |
| "learning_rate": 4.9984049935980726e-05, | |
| "loss": 0.377, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 0.554093567251462, | |
| "grad_norm": 2.0257017612457275, | |
| "learning_rate": 4.998312558730159e-05, | |
| "loss": 0.4267, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 0.5555555555555556, | |
| "grad_norm": 1.9673963785171509, | |
| "learning_rate": 4.998217521251326e-05, | |
| "loss": 0.4208, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.5570175438596491, | |
| "grad_norm": 2.2184839248657227, | |
| "learning_rate": 4.998119881260576e-05, | |
| "loss": 0.5568, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 0.5584795321637427, | |
| "grad_norm": 1.660023808479309, | |
| "learning_rate": 4.9980196388596255e-05, | |
| "loss": 0.3989, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 0.5599415204678363, | |
| "grad_norm": 2.1084377765655518, | |
| "learning_rate": 4.9979167941529034e-05, | |
| "loss": 0.4559, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 0.5614035087719298, | |
| "grad_norm": 2.341717004776001, | |
| "learning_rate": 4.997811347247548e-05, | |
| "loss": 0.5585, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 0.5628654970760234, | |
| "grad_norm": 2.5732650756835938, | |
| "learning_rate": 4.997703298253406e-05, | |
| "loss": 0.4662, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.564327485380117, | |
| "grad_norm": 1.9843530654907227, | |
| "learning_rate": 4.9975926472830395e-05, | |
| "loss": 0.5027, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 0.5657894736842105, | |
| "grad_norm": 1.9600590467453003, | |
| "learning_rate": 4.997479394451717e-05, | |
| "loss": 0.3689, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 0.5672514619883041, | |
| "grad_norm": 2.2262799739837646, | |
| "learning_rate": 4.997363539877422e-05, | |
| "loss": 0.4345, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 0.5687134502923976, | |
| "grad_norm": 1.883344054222107, | |
| "learning_rate": 4.997245083680843e-05, | |
| "loss": 0.4418, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 0.5701754385964912, | |
| "grad_norm": 1.7878280878067017, | |
| "learning_rate": 4.997124025985383e-05, | |
| "loss": 0.3656, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.5716374269005848, | |
| "grad_norm": 2.706404685974121, | |
| "learning_rate": 4.9970003669171525e-05, | |
| "loss": 0.5461, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 0.5730994152046783, | |
| "grad_norm": 1.806484580039978, | |
| "learning_rate": 4.996874106604974e-05, | |
| "loss": 0.4398, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 0.5745614035087719, | |
| "grad_norm": 2.131579875946045, | |
| "learning_rate": 4.9967452451803784e-05, | |
| "loss": 0.4419, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 0.5760233918128655, | |
| "grad_norm": 1.7251105308532715, | |
| "learning_rate": 4.996613782777607e-05, | |
| "loss": 0.3886, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 0.577485380116959, | |
| "grad_norm": 2.2064549922943115, | |
| "learning_rate": 4.996479719533611e-05, | |
| "loss": 0.3766, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.5789473684210527, | |
| "grad_norm": 2.3021111488342285, | |
| "learning_rate": 4.996343055588049e-05, | |
| "loss": 0.4369, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 0.5804093567251462, | |
| "grad_norm": 1.6880431175231934, | |
| "learning_rate": 4.996203791083291e-05, | |
| "loss": 0.3367, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 0.5818713450292398, | |
| "grad_norm": 2.097285032272339, | |
| "learning_rate": 4.996061926164416e-05, | |
| "loss": 0.4358, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 0.5833333333333334, | |
| "grad_norm": 1.8281804323196411, | |
| "learning_rate": 4.99591746097921e-05, | |
| "loss": 0.3585, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 0.5847953216374269, | |
| "grad_norm": 2.1264681816101074, | |
| "learning_rate": 4.995770395678171e-05, | |
| "loss": 0.3761, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.5862573099415205, | |
| "grad_norm": 1.7255804538726807, | |
| "learning_rate": 4.995620730414502e-05, | |
| "loss": 0.3966, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 0.5877192982456141, | |
| "grad_norm": 2.54319429397583, | |
| "learning_rate": 4.995468465344119e-05, | |
| "loss": 0.5814, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 0.5891812865497076, | |
| "grad_norm": 2.460178852081299, | |
| "learning_rate": 4.9953136006256415e-05, | |
| "loss": 0.7037, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 0.5906432748538012, | |
| "grad_norm": 2.0374484062194824, | |
| "learning_rate": 4.9951561364204006e-05, | |
| "loss": 0.4698, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 0.5921052631578947, | |
| "grad_norm": 1.8541250228881836, | |
| "learning_rate": 4.994996072892434e-05, | |
| "loss": 0.3881, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.5935672514619883, | |
| "grad_norm": 1.8663760423660278, | |
| "learning_rate": 4.994833410208487e-05, | |
| "loss": 0.4504, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 0.5950292397660819, | |
| "grad_norm": 1.7711328268051147, | |
| "learning_rate": 4.994668148538013e-05, | |
| "loss": 0.4055, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 0.5964912280701754, | |
| "grad_norm": 1.6116139888763428, | |
| "learning_rate": 4.994500288053174e-05, | |
| "loss": 0.3853, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 0.597953216374269, | |
| "grad_norm": 1.6673407554626465, | |
| "learning_rate": 4.994329828928838e-05, | |
| "loss": 0.3747, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 0.5994152046783626, | |
| "grad_norm": 1.8015586137771606, | |
| "learning_rate": 4.994156771342579e-05, | |
| "loss": 0.3994, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.6008771929824561, | |
| "grad_norm": 1.781525731086731, | |
| "learning_rate": 4.993981115474681e-05, | |
| "loss": 0.3742, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 0.6023391812865497, | |
| "grad_norm": 1.8017756938934326, | |
| "learning_rate": 4.9938028615081315e-05, | |
| "loss": 0.3971, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 0.6038011695906432, | |
| "grad_norm": 2.0826539993286133, | |
| "learning_rate": 4.993622009628628e-05, | |
| "loss": 0.3808, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 0.6052631578947368, | |
| "grad_norm": 1.8292765617370605, | |
| "learning_rate": 4.99343856002457e-05, | |
| "loss": 0.3772, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 0.6067251461988304, | |
| "grad_norm": 1.644363522529602, | |
| "learning_rate": 4.993252512887069e-05, | |
| "loss": 0.3937, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.6081871345029239, | |
| "grad_norm": 2.1542656421661377, | |
| "learning_rate": 4.993063868409936e-05, | |
| "loss": 0.5146, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 0.6096491228070176, | |
| "grad_norm": 1.9757295846939087, | |
| "learning_rate": 4.9928726267896924e-05, | |
| "loss": 0.4166, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 0.6111111111111112, | |
| "grad_norm": 2.384495258331299, | |
| "learning_rate": 4.9926787882255636e-05, | |
| "loss": 0.43, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 0.6125730994152047, | |
| "grad_norm": 2.5664448738098145, | |
| "learning_rate": 4.99248235291948e-05, | |
| "loss": 0.5663, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 0.6140350877192983, | |
| "grad_norm": 2.3594064712524414, | |
| "learning_rate": 4.992283321076079e-05, | |
| "loss": 0.4803, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.6154970760233918, | |
| "grad_norm": 1.9946856498718262, | |
| "learning_rate": 4.992081692902699e-05, | |
| "loss": 0.3847, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 0.6169590643274854, | |
| "grad_norm": 1.8300626277923584, | |
| "learning_rate": 4.9918774686093885e-05, | |
| "loss": 0.3672, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 0.618421052631579, | |
| "grad_norm": 1.8371638059616089, | |
| "learning_rate": 4.991670648408895e-05, | |
| "loss": 0.3641, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 0.6198830409356725, | |
| "grad_norm": 2.318730115890503, | |
| "learning_rate": 4.991461232516675e-05, | |
| "loss": 0.4443, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 0.6213450292397661, | |
| "grad_norm": 1.766786813735962, | |
| "learning_rate": 4.9912492211508855e-05, | |
| "loss": 0.4328, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.6228070175438597, | |
| "grad_norm": 2.2804787158966064, | |
| "learning_rate": 4.9910346145323906e-05, | |
| "loss": 0.4936, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 0.6242690058479532, | |
| "grad_norm": 1.5067193508148193, | |
| "learning_rate": 4.990817412884754e-05, | |
| "loss": 0.3576, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 0.6257309941520468, | |
| "grad_norm": 2.2982981204986572, | |
| "learning_rate": 4.990597616434246e-05, | |
| "loss": 0.5083, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 0.6271929824561403, | |
| "grad_norm": 1.7265206575393677, | |
| "learning_rate": 4.99037522540984e-05, | |
| "loss": 0.3529, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 0.6286549707602339, | |
| "grad_norm": 1.8511167764663696, | |
| "learning_rate": 4.99015024004321e-05, | |
| "loss": 0.4129, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.6301169590643275, | |
| "grad_norm": 2.0598514080047607, | |
| "learning_rate": 4.9899226605687344e-05, | |
| "loss": 0.5372, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 0.631578947368421, | |
| "grad_norm": 1.7002969980239868, | |
| "learning_rate": 4.989692487223493e-05, | |
| "loss": 0.4971, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 0.6330409356725146, | |
| "grad_norm": 1.8592265844345093, | |
| "learning_rate": 4.9894597202472696e-05, | |
| "loss": 0.396, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 0.6345029239766082, | |
| "grad_norm": 1.6110180616378784, | |
| "learning_rate": 4.989224359882547e-05, | |
| "loss": 0.3312, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 0.6359649122807017, | |
| "grad_norm": 1.9149975776672363, | |
| "learning_rate": 4.988986406374512e-05, | |
| "loss": 0.5386, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.6374269005847953, | |
| "grad_norm": 2.1653451919555664, | |
| "learning_rate": 4.9887458599710526e-05, | |
| "loss": 0.4666, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 0.6388888888888888, | |
| "grad_norm": 1.868247389793396, | |
| "learning_rate": 4.9885027209227575e-05, | |
| "loss": 0.4783, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 0.6403508771929824, | |
| "grad_norm": 2.091641664505005, | |
| "learning_rate": 4.9882569894829144e-05, | |
| "loss": 0.3685, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 0.6418128654970761, | |
| "grad_norm": 1.8764753341674805, | |
| "learning_rate": 4.9880086659075156e-05, | |
| "loss": 0.3313, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 0.6432748538011696, | |
| "grad_norm": 2.0913355350494385, | |
| "learning_rate": 4.987757750455251e-05, | |
| "loss": 0.4106, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.6447368421052632, | |
| "grad_norm": 2.0343973636627197, | |
| "learning_rate": 4.9875042433875105e-05, | |
| "loss": 0.5114, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 0.6461988304093568, | |
| "grad_norm": 3.2243010997772217, | |
| "learning_rate": 4.9872481449683844e-05, | |
| "loss": 0.6282, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 0.6476608187134503, | |
| "grad_norm": 2.0600948333740234, | |
| "learning_rate": 4.986989455464663e-05, | |
| "loss": 0.3549, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 0.6491228070175439, | |
| "grad_norm": 2.429845094680786, | |
| "learning_rate": 4.986728175145836e-05, | |
| "loss": 0.2972, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 0.6505847953216374, | |
| "grad_norm": 1.7953526973724365, | |
| "learning_rate": 4.986464304284091e-05, | |
| "loss": 0.3382, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.652046783625731, | |
| "grad_norm": 2.2379395961761475, | |
| "learning_rate": 4.9861978431543145e-05, | |
| "loss": 0.5618, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 0.6535087719298246, | |
| "grad_norm": 1.5855752229690552, | |
| "learning_rate": 4.9859287920340915e-05, | |
| "loss": 0.2735, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 0.6549707602339181, | |
| "grad_norm": 1.7404121160507202, | |
| "learning_rate": 4.985657151203706e-05, | |
| "loss": 0.4535, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 0.6564327485380117, | |
| "grad_norm": 2.5131490230560303, | |
| "learning_rate": 4.98538292094614e-05, | |
| "loss": 0.6326, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 0.6578947368421053, | |
| "grad_norm": 1.636088490486145, | |
| "learning_rate": 4.98510610154707e-05, | |
| "loss": 0.4868, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.6593567251461988, | |
| "grad_norm": 1.4509011507034302, | |
| "learning_rate": 4.984826693294874e-05, | |
| "loss": 0.2488, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 0.6608187134502924, | |
| "grad_norm": 2.273134708404541, | |
| "learning_rate": 4.984544696480624e-05, | |
| "loss": 0.4622, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 0.6622807017543859, | |
| "grad_norm": 1.9466441869735718, | |
| "learning_rate": 4.9842601113980886e-05, | |
| "loss": 0.4909, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 0.6637426900584795, | |
| "grad_norm": 2.194655418395996, | |
| "learning_rate": 4.983972938343735e-05, | |
| "loss": 0.4337, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 0.6652046783625731, | |
| "grad_norm": 2.2090723514556885, | |
| "learning_rate": 4.9836831776167245e-05, | |
| "loss": 0.5377, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 0.6666666666666666, | |
| "grad_norm": 1.5295518636703491, | |
| "learning_rate": 4.983390829518914e-05, | |
| "loss": 0.347, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 0.6681286549707602, | |
| "grad_norm": 1.8688359260559082, | |
| "learning_rate": 4.983095894354858e-05, | |
| "loss": 0.3858, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 0.6695906432748538, | |
| "grad_norm": 1.6634221076965332, | |
| "learning_rate": 4.982798372431803e-05, | |
| "loss": 0.3241, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 0.6710526315789473, | |
| "grad_norm": 2.1221494674682617, | |
| "learning_rate": 4.982498264059692e-05, | |
| "loss": 0.4185, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 0.672514619883041, | |
| "grad_norm": 1.8509141206741333, | |
| "learning_rate": 4.982195569551162e-05, | |
| "loss": 0.5775, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.6739766081871345, | |
| "grad_norm": 1.8871842622756958, | |
| "learning_rate": 4.981890289221546e-05, | |
| "loss": 0.4737, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 0.6754385964912281, | |
| "grad_norm": 1.8580923080444336, | |
| "learning_rate": 4.9815824233888664e-05, | |
| "loss": 0.4307, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 0.6769005847953217, | |
| "grad_norm": 1.9768396615982056, | |
| "learning_rate": 4.9812719723738435e-05, | |
| "loss": 0.4624, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 0.6783625730994152, | |
| "grad_norm": 1.8355333805084229, | |
| "learning_rate": 4.980958936499888e-05, | |
| "loss": 0.4705, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 0.6798245614035088, | |
| "grad_norm": 1.911109209060669, | |
| "learning_rate": 4.9806433160931044e-05, | |
| "loss": 0.3952, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 0.6812865497076024, | |
| "grad_norm": 1.5615458488464355, | |
| "learning_rate": 4.98032511148229e-05, | |
| "loss": 0.3498, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 0.6827485380116959, | |
| "grad_norm": 1.5741957426071167, | |
| "learning_rate": 4.980004322998933e-05, | |
| "loss": 0.3692, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 0.6842105263157895, | |
| "grad_norm": 1.853812336921692, | |
| "learning_rate": 4.9796809509772145e-05, | |
| "loss": 0.4068, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 0.685672514619883, | |
| "grad_norm": 1.6525896787643433, | |
| "learning_rate": 4.979354995754006e-05, | |
| "loss": 0.3492, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 0.6871345029239766, | |
| "grad_norm": 1.506797432899475, | |
| "learning_rate": 4.979026457668871e-05, | |
| "loss": 0.4271, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.6885964912280702, | |
| "grad_norm": 1.5811541080474854, | |
| "learning_rate": 4.978695337064063e-05, | |
| "loss": 0.4028, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 0.6900584795321637, | |
| "grad_norm": 1.9363372325897217, | |
| "learning_rate": 4.9783616342845265e-05, | |
| "loss": 0.4188, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 0.6915204678362573, | |
| "grad_norm": 2.290966749191284, | |
| "learning_rate": 4.978025349677895e-05, | |
| "loss": 0.4883, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 0.6929824561403509, | |
| "grad_norm": 2.7394368648529053, | |
| "learning_rate": 4.977686483594492e-05, | |
| "loss": 0.3802, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 0.6944444444444444, | |
| "grad_norm": 1.7522988319396973, | |
| "learning_rate": 4.977345036387331e-05, | |
| "loss": 0.3625, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 0.695906432748538, | |
| "grad_norm": 2.302976608276367, | |
| "learning_rate": 4.977001008412113e-05, | |
| "loss": 0.5672, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 0.6973684210526315, | |
| "grad_norm": 1.7930705547332764, | |
| "learning_rate": 4.9766544000272296e-05, | |
| "loss": 0.4035, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 0.6988304093567251, | |
| "grad_norm": 1.8383108377456665, | |
| "learning_rate": 4.976305211593758e-05, | |
| "loss": 0.3863, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 0.7002923976608187, | |
| "grad_norm": 1.6916474103927612, | |
| "learning_rate": 4.975953443475465e-05, | |
| "loss": 0.3741, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 0.7017543859649122, | |
| "grad_norm": 1.9730843305587769, | |
| "learning_rate": 4.975599096038804e-05, | |
| "loss": 0.5164, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.7032163742690059, | |
| "grad_norm": 1.9919596910476685, | |
| "learning_rate": 4.9752421696529164e-05, | |
| "loss": 0.4424, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 0.7046783625730995, | |
| "grad_norm": 2.2427196502685547, | |
| "learning_rate": 4.974882664689627e-05, | |
| "loss": 0.4054, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 0.706140350877193, | |
| "grad_norm": 1.5932695865631104, | |
| "learning_rate": 4.974520581523452e-05, | |
| "loss": 0.3196, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 0.7076023391812866, | |
| "grad_norm": 2.0366060733795166, | |
| "learning_rate": 4.9741559205315887e-05, | |
| "loss": 0.4138, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 0.7090643274853801, | |
| "grad_norm": 1.6552878618240356, | |
| "learning_rate": 4.973788682093923e-05, | |
| "loss": 0.3856, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 0.7105263157894737, | |
| "grad_norm": 2.3496367931365967, | |
| "learning_rate": 4.973418866593023e-05, | |
| "loss": 0.4812, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 0.7119883040935673, | |
| "grad_norm": 1.6886743307113647, | |
| "learning_rate": 4.9730464744141445e-05, | |
| "loss": 0.3486, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 0.7134502923976608, | |
| "grad_norm": 3.122567653656006, | |
| "learning_rate": 4.972671505945227e-05, | |
| "loss": 0.5483, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 0.7149122807017544, | |
| "grad_norm": 2.0637013912200928, | |
| "learning_rate": 4.972293961576891e-05, | |
| "loss": 0.4322, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 0.716374269005848, | |
| "grad_norm": 1.9868749380111694, | |
| "learning_rate": 4.971913841702443e-05, | |
| "loss": 0.5018, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.7178362573099415, | |
| "grad_norm": 2.0727813243865967, | |
| "learning_rate": 4.971531146717873e-05, | |
| "loss": 0.5054, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 0.7192982456140351, | |
| "grad_norm": 1.7431650161743164, | |
| "learning_rate": 4.97114587702185e-05, | |
| "loss": 0.4233, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 0.7207602339181286, | |
| "grad_norm": 1.752251148223877, | |
| "learning_rate": 4.970758033015731e-05, | |
| "loss": 0.3652, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 0.7222222222222222, | |
| "grad_norm": 1.9212924242019653, | |
| "learning_rate": 4.970367615103549e-05, | |
| "loss": 0.4639, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 0.7236842105263158, | |
| "grad_norm": 1.5945523977279663, | |
| "learning_rate": 4.969974623692023e-05, | |
| "loss": 0.4185, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 0.7251461988304093, | |
| "grad_norm": 1.7479863166809082, | |
| "learning_rate": 4.969579059190549e-05, | |
| "loss": 0.5139, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 0.7266081871345029, | |
| "grad_norm": 1.9763168096542358, | |
| "learning_rate": 4.969180922011206e-05, | |
| "loss": 0.3539, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 0.7280701754385965, | |
| "grad_norm": 1.7572076320648193, | |
| "learning_rate": 4.968780212568752e-05, | |
| "loss": 0.3796, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 0.72953216374269, | |
| "grad_norm": 1.905580997467041, | |
| "learning_rate": 4.968376931280626e-05, | |
| "loss": 0.4989, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 0.7309941520467836, | |
| "grad_norm": 1.6628644466400146, | |
| "learning_rate": 4.9679710785669454e-05, | |
| "loss": 0.3901, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.7324561403508771, | |
| "grad_norm": 1.544498324394226, | |
| "learning_rate": 4.967562654850505e-05, | |
| "loss": 0.4249, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 0.7339181286549707, | |
| "grad_norm": 1.582729458808899, | |
| "learning_rate": 4.9671516605567806e-05, | |
| "loss": 0.3758, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 0.7353801169590644, | |
| "grad_norm": 2.354980230331421, | |
| "learning_rate": 4.9667380961139226e-05, | |
| "loss": 0.52, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 0.7368421052631579, | |
| "grad_norm": 2.1196136474609375, | |
| "learning_rate": 4.9663219619527624e-05, | |
| "loss": 0.5812, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 0.7383040935672515, | |
| "grad_norm": 2.0975210666656494, | |
| "learning_rate": 4.965903258506806e-05, | |
| "loss": 0.4089, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 0.7397660818713451, | |
| "grad_norm": 2.7108922004699707, | |
| "learning_rate": 4.965481986212237e-05, | |
| "loss": 0.4879, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 0.7412280701754386, | |
| "grad_norm": 1.5601476430892944, | |
| "learning_rate": 4.965058145507915e-05, | |
| "loss": 0.3611, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 0.7426900584795322, | |
| "grad_norm": 2.0003271102905273, | |
| "learning_rate": 4.9646317368353743e-05, | |
| "loss": 0.4147, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 0.7441520467836257, | |
| "grad_norm": 1.9817136526107788, | |
| "learning_rate": 4.964202760638826e-05, | |
| "loss": 0.387, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 0.7456140350877193, | |
| "grad_norm": 1.8296986818313599, | |
| "learning_rate": 4.963771217365154e-05, | |
| "loss": 0.4455, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.7470760233918129, | |
| "grad_norm": 1.7010998725891113, | |
| "learning_rate": 4.963337107463918e-05, | |
| "loss": 0.4077, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 0.7485380116959064, | |
| "grad_norm": 2.176881790161133, | |
| "learning_rate": 4.9629004313873506e-05, | |
| "loss": 0.3918, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 1.9547199010849, | |
| "learning_rate": 4.9624611895903586e-05, | |
| "loss": 0.4587, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 0.7514619883040936, | |
| "grad_norm": 2.218224048614502, | |
| "learning_rate": 4.962019382530521e-05, | |
| "loss": 0.5855, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 0.7529239766081871, | |
| "grad_norm": 2.166445016860962, | |
| "learning_rate": 4.961575010668088e-05, | |
| "loss": 0.5484, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 0.7543859649122807, | |
| "grad_norm": 2.160301685333252, | |
| "learning_rate": 4.9611280744659836e-05, | |
| "loss": 0.463, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 0.7558479532163743, | |
| "grad_norm": 2.650303840637207, | |
| "learning_rate": 4.960678574389803e-05, | |
| "loss": 0.4189, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 0.7573099415204678, | |
| "grad_norm": 2.0575671195983887, | |
| "learning_rate": 4.960226510907811e-05, | |
| "loss": 0.4019, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 0.7587719298245614, | |
| "grad_norm": 2.089404821395874, | |
| "learning_rate": 4.959771884490946e-05, | |
| "loss": 0.51, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 0.7602339181286549, | |
| "grad_norm": 2.4824209213256836, | |
| "learning_rate": 4.95931469561281e-05, | |
| "loss": 0.5687, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.7616959064327485, | |
| "grad_norm": 1.8112894296646118, | |
| "learning_rate": 4.958854944749681e-05, | |
| "loss": 0.4055, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 0.7631578947368421, | |
| "grad_norm": 1.983642578125, | |
| "learning_rate": 4.958392632380503e-05, | |
| "loss": 0.4216, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 0.7646198830409356, | |
| "grad_norm": 1.9053888320922852, | |
| "learning_rate": 4.957927758986888e-05, | |
| "loss": 0.4715, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 0.7660818713450293, | |
| "grad_norm": 1.8498287200927734, | |
| "learning_rate": 4.957460325053117e-05, | |
| "loss": 0.4477, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 0.7675438596491229, | |
| "grad_norm": 1.7368321418762207, | |
| "learning_rate": 4.956990331066139e-05, | |
| "loss": 0.3244, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 0.7690058479532164, | |
| "grad_norm": 1.6912308931350708, | |
| "learning_rate": 4.956517777515568e-05, | |
| "loss": 0.4397, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 0.77046783625731, | |
| "grad_norm": 2.0592827796936035, | |
| "learning_rate": 4.9560426648936856e-05, | |
| "loss": 0.4041, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 0.7719298245614035, | |
| "grad_norm": 2.2229933738708496, | |
| "learning_rate": 4.9555649936954396e-05, | |
| "loss": 0.4241, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 0.7733918128654971, | |
| "grad_norm": 1.8884410858154297, | |
| "learning_rate": 4.955084764418443e-05, | |
| "loss": 0.4426, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 0.7748538011695907, | |
| "grad_norm": 2.183872938156128, | |
| "learning_rate": 4.954601977562973e-05, | |
| "loss": 0.4154, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.7763157894736842, | |
| "grad_norm": 2.583272933959961, | |
| "learning_rate": 4.954116633631972e-05, | |
| "loss": 0.4535, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 0.7777777777777778, | |
| "grad_norm": 1.8618327379226685, | |
| "learning_rate": 4.953628733131045e-05, | |
| "loss": 0.4711, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 0.7792397660818714, | |
| "grad_norm": 2.038996934890747, | |
| "learning_rate": 4.953138276568462e-05, | |
| "loss": 0.5113, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 0.7807017543859649, | |
| "grad_norm": 2.2305846214294434, | |
| "learning_rate": 4.952645264455155e-05, | |
| "loss": 0.4894, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 0.7821637426900585, | |
| "grad_norm": 2.13934063911438, | |
| "learning_rate": 4.952149697304716e-05, | |
| "loss": 0.5283, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 0.783625730994152, | |
| "grad_norm": 1.430418848991394, | |
| "learning_rate": 4.951651575633405e-05, | |
| "loss": 0.3615, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 0.7850877192982456, | |
| "grad_norm": 2.216066598892212, | |
| "learning_rate": 4.951150899960135e-05, | |
| "loss": 0.3938, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 0.7865497076023392, | |
| "grad_norm": 1.9449279308319092, | |
| "learning_rate": 4.9506476708064865e-05, | |
| "loss": 0.4537, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 0.7880116959064327, | |
| "grad_norm": 2.240190029144287, | |
| "learning_rate": 4.9501418886966955e-05, | |
| "loss": 0.428, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 0.7894736842105263, | |
| "grad_norm": 2.368926763534546, | |
| "learning_rate": 4.9496335541576595e-05, | |
| "loss": 0.4989, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.7909356725146199, | |
| "grad_norm": 1.9143961668014526, | |
| "learning_rate": 4.949122667718935e-05, | |
| "loss": 0.3476, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 0.7923976608187134, | |
| "grad_norm": 2.0308244228363037, | |
| "learning_rate": 4.9486092299127366e-05, | |
| "loss": 0.4366, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 0.793859649122807, | |
| "grad_norm": 2.0171005725860596, | |
| "learning_rate": 4.948093241273938e-05, | |
| "loss": 0.4345, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 0.7953216374269005, | |
| "grad_norm": 2.3451991081237793, | |
| "learning_rate": 4.947574702340067e-05, | |
| "loss": 0.5557, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 0.7967836257309941, | |
| "grad_norm": 2.4183530807495117, | |
| "learning_rate": 4.9470536136513114e-05, | |
| "loss": 0.5053, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 0.7982456140350878, | |
| "grad_norm": 1.8835848569869995, | |
| "learning_rate": 4.946529975750514e-05, | |
| "loss": 0.5564, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 0.7997076023391813, | |
| "grad_norm": 1.7763210535049438, | |
| "learning_rate": 4.946003789183173e-05, | |
| "loss": 0.4679, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 0.8011695906432749, | |
| "grad_norm": 1.696906566619873, | |
| "learning_rate": 4.945475054497443e-05, | |
| "loss": 0.3111, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 0.8026315789473685, | |
| "grad_norm": 2.0995283126831055, | |
| "learning_rate": 4.944943772244131e-05, | |
| "loss": 0.3954, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 0.804093567251462, | |
| "grad_norm": 1.713724136352539, | |
| "learning_rate": 4.944409942976699e-05, | |
| "loss": 0.4271, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.8055555555555556, | |
| "grad_norm": 2.8676769733428955, | |
| "learning_rate": 4.943873567251262e-05, | |
| "loss": 0.4477, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 0.8070175438596491, | |
| "grad_norm": 1.5396246910095215, | |
| "learning_rate": 4.94333464562659e-05, | |
| "loss": 0.3187, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 0.8084795321637427, | |
| "grad_norm": 1.6387983560562134, | |
| "learning_rate": 4.9427931786641e-05, | |
| "loss": 0.2871, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 0.8099415204678363, | |
| "grad_norm": 1.7519559860229492, | |
| "learning_rate": 4.942249166927867e-05, | |
| "loss": 0.4239, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 0.8114035087719298, | |
| "grad_norm": 2.5054068565368652, | |
| "learning_rate": 4.941702610984612e-05, | |
| "loss": 0.4863, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 0.8128654970760234, | |
| "grad_norm": 2.1645209789276123, | |
| "learning_rate": 4.941153511403709e-05, | |
| "loss": 0.5865, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 0.814327485380117, | |
| "grad_norm": 2.2612287998199463, | |
| "learning_rate": 4.9406018687571816e-05, | |
| "loss": 0.5705, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 0.8157894736842105, | |
| "grad_norm": 1.9473901987075806, | |
| "learning_rate": 4.9400476836197014e-05, | |
| "loss": 0.4731, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 0.8172514619883041, | |
| "grad_norm": 2.250812530517578, | |
| "learning_rate": 4.9394909565685894e-05, | |
| "loss": 0.5051, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 0.8187134502923976, | |
| "grad_norm": 2.19724440574646, | |
| "learning_rate": 4.938931688183815e-05, | |
| "loss": 0.5666, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.8201754385964912, | |
| "grad_norm": 2.4907655715942383, | |
| "learning_rate": 4.9383698790479946e-05, | |
| "loss": 0.4217, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 0.8216374269005848, | |
| "grad_norm": 2.0893754959106445, | |
| "learning_rate": 4.937805529746391e-05, | |
| "loss": 0.5175, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 0.8230994152046783, | |
| "grad_norm": 1.7704755067825317, | |
| "learning_rate": 4.937238640866914e-05, | |
| "loss": 0.3764, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 0.8245614035087719, | |
| "grad_norm": 1.9850884675979614, | |
| "learning_rate": 4.936669213000119e-05, | |
| "loss": 0.5432, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 0.8260233918128655, | |
| "grad_norm": 1.8370407819747925, | |
| "learning_rate": 4.9360972467392056e-05, | |
| "loss": 0.4676, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 0.827485380116959, | |
| "grad_norm": 1.9375964403152466, | |
| "learning_rate": 4.935522742680019e-05, | |
| "loss": 0.4966, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 0.8289473684210527, | |
| "grad_norm": 1.6922190189361572, | |
| "learning_rate": 4.934945701421046e-05, | |
| "loss": 0.309, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 0.8304093567251462, | |
| "grad_norm": 1.8439449071884155, | |
| "learning_rate": 4.93436612356342e-05, | |
| "loss": 0.456, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 0.8318713450292398, | |
| "grad_norm": 2.0624637603759766, | |
| "learning_rate": 4.9337840097109126e-05, | |
| "loss": 0.5002, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 0.8333333333333334, | |
| "grad_norm": 2.2448410987854004, | |
| "learning_rate": 4.93319936046994e-05, | |
| "loss": 0.6695, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.8347953216374269, | |
| "grad_norm": 2.0495433807373047, | |
| "learning_rate": 4.9326121764495596e-05, | |
| "loss": 0.4, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 0.8362573099415205, | |
| "grad_norm": 2.4171793460845947, | |
| "learning_rate": 4.9320224582614694e-05, | |
| "loss": 0.6087, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 0.8377192982456141, | |
| "grad_norm": 1.9952441453933716, | |
| "learning_rate": 4.931430206520006e-05, | |
| "loss": 0.4698, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 0.8391812865497076, | |
| "grad_norm": 1.5964637994766235, | |
| "learning_rate": 4.930835421842146e-05, | |
| "loss": 0.3387, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 0.8406432748538012, | |
| "grad_norm": 2.148587942123413, | |
| "learning_rate": 4.930238104847506e-05, | |
| "loss": 0.3426, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 0.8421052631578947, | |
| "grad_norm": 1.6622304916381836, | |
| "learning_rate": 4.929638256158339e-05, | |
| "loss": 0.3814, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 0.8435672514619883, | |
| "grad_norm": 2.045555591583252, | |
| "learning_rate": 4.929035876399535e-05, | |
| "loss": 0.3362, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 0.8450292397660819, | |
| "grad_norm": 2.1931281089782715, | |
| "learning_rate": 4.928430966198622e-05, | |
| "loss": 0.3913, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 0.8464912280701754, | |
| "grad_norm": 1.4383291006088257, | |
| "learning_rate": 4.927823526185765e-05, | |
| "loss": 0.2923, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 0.847953216374269, | |
| "grad_norm": 1.5619359016418457, | |
| "learning_rate": 4.927213556993762e-05, | |
| "loss": 0.3705, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.8494152046783626, | |
| "grad_norm": 1.9117201566696167, | |
| "learning_rate": 4.926601059258046e-05, | |
| "loss": 0.5157, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 0.8508771929824561, | |
| "grad_norm": 1.8845484256744385, | |
| "learning_rate": 4.925986033616687e-05, | |
| "loss": 0.4445, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 0.8523391812865497, | |
| "grad_norm": 1.9900391101837158, | |
| "learning_rate": 4.925368480710385e-05, | |
| "loss": 0.4843, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 0.8538011695906432, | |
| "grad_norm": 1.8824185132980347, | |
| "learning_rate": 4.924748401182475e-05, | |
| "loss": 0.3602, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 0.8552631578947368, | |
| "grad_norm": 1.8788493871688843, | |
| "learning_rate": 4.924125795678923e-05, | |
| "loss": 0.4089, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 0.8567251461988304, | |
| "grad_norm": 1.8428534269332886, | |
| "learning_rate": 4.923500664848326e-05, | |
| "loss": 0.3932, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 0.8581871345029239, | |
| "grad_norm": 1.9290796518325806, | |
| "learning_rate": 4.922873009341914e-05, | |
| "loss": 0.4343, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 0.8596491228070176, | |
| "grad_norm": 1.7226279973983765, | |
| "learning_rate": 4.9222428298135446e-05, | |
| "loss": 0.3974, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 0.8611111111111112, | |
| "grad_norm": 2.079890489578247, | |
| "learning_rate": 4.921610126919706e-05, | |
| "loss": 0.4883, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 0.8625730994152047, | |
| "grad_norm": 2.8552420139312744, | |
| "learning_rate": 4.920974901319515e-05, | |
| "loss": 0.5934, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.8640350877192983, | |
| "grad_norm": 1.5755919218063354, | |
| "learning_rate": 4.920337153674716e-05, | |
| "loss": 0.3446, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 0.8654970760233918, | |
| "grad_norm": 2.265284299850464, | |
| "learning_rate": 4.919696884649681e-05, | |
| "loss": 0.5121, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 0.8669590643274854, | |
| "grad_norm": 2.215454339981079, | |
| "learning_rate": 4.919054094911409e-05, | |
| "loss": 0.3496, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 0.868421052631579, | |
| "grad_norm": 2.484990358352661, | |
| "learning_rate": 4.9184087851295244e-05, | |
| "loss": 0.6217, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 0.8698830409356725, | |
| "grad_norm": 2.127047538757324, | |
| "learning_rate": 4.917760955976277e-05, | |
| "loss": 0.5688, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 0.8713450292397661, | |
| "grad_norm": 1.5317752361297607, | |
| "learning_rate": 4.9171106081265416e-05, | |
| "loss": 0.3787, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 0.8728070175438597, | |
| "grad_norm": 1.934916615486145, | |
| "learning_rate": 4.916457742257816e-05, | |
| "loss": 0.3755, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 0.8742690058479532, | |
| "grad_norm": 2.281262159347534, | |
| "learning_rate": 4.915802359050222e-05, | |
| "loss": 0.5163, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 0.8757309941520468, | |
| "grad_norm": 1.5686475038528442, | |
| "learning_rate": 4.915144459186502e-05, | |
| "loss": 0.3343, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 0.8771929824561403, | |
| "grad_norm": 1.7507493495941162, | |
| "learning_rate": 4.9144840433520245e-05, | |
| "loss": 0.4965, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.8786549707602339, | |
| "grad_norm": 1.8117406368255615, | |
| "learning_rate": 4.9138211122347736e-05, | |
| "loss": 0.4511, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 0.8801169590643275, | |
| "grad_norm": 2.0598530769348145, | |
| "learning_rate": 4.913155666525357e-05, | |
| "loss": 0.4598, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 0.881578947368421, | |
| "grad_norm": 1.7195734977722168, | |
| "learning_rate": 4.9124877069170017e-05, | |
| "loss": 0.409, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 0.8830409356725146, | |
| "grad_norm": 1.5387144088745117, | |
| "learning_rate": 4.9118172341055516e-05, | |
| "loss": 0.3136, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 0.8845029239766082, | |
| "grad_norm": 2.1562612056732178, | |
| "learning_rate": 4.9111442487894705e-05, | |
| "loss": 0.423, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 0.8859649122807017, | |
| "grad_norm": 2.1832849979400635, | |
| "learning_rate": 4.91046875166984e-05, | |
| "loss": 0.4298, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 0.8874269005847953, | |
| "grad_norm": 1.7478790283203125, | |
| "learning_rate": 4.9097907434503564e-05, | |
| "loss": 0.4456, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 0.8888888888888888, | |
| "grad_norm": 2.050065279006958, | |
| "learning_rate": 4.909110224837334e-05, | |
| "loss": 0.4309, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 0.8903508771929824, | |
| "grad_norm": 1.7566088438034058, | |
| "learning_rate": 4.9084271965397014e-05, | |
| "loss": 0.3601, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 0.8918128654970761, | |
| "grad_norm": 1.9086076021194458, | |
| "learning_rate": 4.907741659269001e-05, | |
| "loss": 0.4556, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.8932748538011696, | |
| "grad_norm": 2.1648757457733154, | |
| "learning_rate": 4.9070536137393896e-05, | |
| "loss": 0.3348, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 0.8947368421052632, | |
| "grad_norm": 1.8173099756240845, | |
| "learning_rate": 4.9063630606676375e-05, | |
| "loss": 0.4545, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 0.8961988304093568, | |
| "grad_norm": 1.8049899339675903, | |
| "learning_rate": 4.905670000773126e-05, | |
| "loss": 0.4211, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 0.8976608187134503, | |
| "grad_norm": 2.503659248352051, | |
| "learning_rate": 4.9049744347778493e-05, | |
| "loss": 0.4539, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 0.8991228070175439, | |
| "grad_norm": 1.674514651298523, | |
| "learning_rate": 4.9042763634064114e-05, | |
| "loss": 0.37, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 0.9005847953216374, | |
| "grad_norm": 1.9637904167175293, | |
| "learning_rate": 4.9035757873860254e-05, | |
| "loss": 0.4675, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 0.902046783625731, | |
| "grad_norm": 1.937668800354004, | |
| "learning_rate": 4.902872707446516e-05, | |
| "loss": 0.3884, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 0.9035087719298246, | |
| "grad_norm": 2.0102808475494385, | |
| "learning_rate": 4.9021671243203135e-05, | |
| "loss": 0.3821, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 0.9049707602339181, | |
| "grad_norm": 1.9879564046859741, | |
| "learning_rate": 4.90145903874246e-05, | |
| "loss": 0.4104, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 0.9064327485380117, | |
| "grad_norm": 1.9384711980819702, | |
| "learning_rate": 4.9007484514505984e-05, | |
| "loss": 0.4689, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.9078947368421053, | |
| "grad_norm": 1.6358801126480103, | |
| "learning_rate": 4.9000353631849835e-05, | |
| "loss": 0.3419, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 0.9093567251461988, | |
| "grad_norm": 1.589561104774475, | |
| "learning_rate": 4.899319774688473e-05, | |
| "loss": 0.3413, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 0.9108187134502924, | |
| "grad_norm": 2.4100232124328613, | |
| "learning_rate": 4.898601686706529e-05, | |
| "loss": 0.5383, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 0.9122807017543859, | |
| "grad_norm": 1.9309585094451904, | |
| "learning_rate": 4.897881099987218e-05, | |
| "loss": 0.3143, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 0.9137426900584795, | |
| "grad_norm": 2.0240402221679688, | |
| "learning_rate": 4.897158015281209e-05, | |
| "loss": 0.4833, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 0.9152046783625731, | |
| "grad_norm": 1.9347280263900757, | |
| "learning_rate": 4.8964324333417756e-05, | |
| "loss": 0.4518, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 0.9166666666666666, | |
| "grad_norm": 1.837488055229187, | |
| "learning_rate": 4.8957043549247886e-05, | |
| "loss": 0.3971, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 0.9181286549707602, | |
| "grad_norm": 2.1595449447631836, | |
| "learning_rate": 4.894973780788722e-05, | |
| "loss": 0.5172, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 0.9195906432748538, | |
| "grad_norm": 1.5503318309783936, | |
| "learning_rate": 4.894240711694652e-05, | |
| "loss": 0.3518, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 0.9210526315789473, | |
| "grad_norm": 2.344728708267212, | |
| "learning_rate": 4.893505148406249e-05, | |
| "loss": 0.7296, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.922514619883041, | |
| "grad_norm": 1.6055322885513306, | |
| "learning_rate": 4.892767091689786e-05, | |
| "loss": 0.3867, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 0.9239766081871345, | |
| "grad_norm": 1.9131611585617065, | |
| "learning_rate": 4.8920265423141296e-05, | |
| "loss": 0.3318, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 0.9254385964912281, | |
| "grad_norm": 11.020429611206055, | |
| "learning_rate": 4.891283501050748e-05, | |
| "loss": 0.7778, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 0.9269005847953217, | |
| "grad_norm": 1.9416626691818237, | |
| "learning_rate": 4.890537968673701e-05, | |
| "loss": 0.348, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 0.9283625730994152, | |
| "grad_norm": 1.9904481172561646, | |
| "learning_rate": 4.8897899459596454e-05, | |
| "loss": 0.4469, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 0.9298245614035088, | |
| "grad_norm": 1.3741624355316162, | |
| "learning_rate": 4.889039433687831e-05, | |
| "loss": 0.2929, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 0.9312865497076024, | |
| "grad_norm": 1.7253600358963013, | |
| "learning_rate": 4.888286432640104e-05, | |
| "loss": 0.4367, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 0.9327485380116959, | |
| "grad_norm": 2.8955740928649902, | |
| "learning_rate": 4.8875309436009e-05, | |
| "loss": 0.5976, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 0.9342105263157895, | |
| "grad_norm": 2.3306732177734375, | |
| "learning_rate": 4.8867729673572484e-05, | |
| "loss": 0.4295, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 0.935672514619883, | |
| "grad_norm": 1.5759066343307495, | |
| "learning_rate": 4.886012504698769e-05, | |
| "loss": 0.4069, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.9371345029239766, | |
| "grad_norm": 1.9031813144683838, | |
| "learning_rate": 4.8852495564176716e-05, | |
| "loss": 0.5287, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 0.9385964912280702, | |
| "grad_norm": 2.0213451385498047, | |
| "learning_rate": 4.884484123308756e-05, | |
| "loss": 0.482, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 0.9400584795321637, | |
| "grad_norm": 1.7652124166488647, | |
| "learning_rate": 4.88371620616941e-05, | |
| "loss": 0.3564, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 0.9415204678362573, | |
| "grad_norm": 2.3317337036132812, | |
| "learning_rate": 4.8829458057996104e-05, | |
| "loss": 0.5466, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 0.9429824561403509, | |
| "grad_norm": 1.7664676904678345, | |
| "learning_rate": 4.8821729230019176e-05, | |
| "loss": 0.4002, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 0.9444444444444444, | |
| "grad_norm": 2.157252311706543, | |
| "learning_rate": 4.881397558581483e-05, | |
| "loss": 0.6214, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 0.945906432748538, | |
| "grad_norm": 1.5458041429519653, | |
| "learning_rate": 4.880619713346039e-05, | |
| "loss": 0.3727, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 0.9473684210526315, | |
| "grad_norm": 1.699417233467102, | |
| "learning_rate": 4.879839388105904e-05, | |
| "loss": 0.4125, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 0.9488304093567251, | |
| "grad_norm": 1.8369117975234985, | |
| "learning_rate": 4.87905658367398e-05, | |
| "loss": 0.4701, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 0.9502923976608187, | |
| "grad_norm": 1.6099762916564941, | |
| "learning_rate": 4.878271300865752e-05, | |
| "loss": 0.3281, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.9517543859649122, | |
| "grad_norm": 1.8754112720489502, | |
| "learning_rate": 4.877483540499286e-05, | |
| "loss": 0.4618, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 0.9532163742690059, | |
| "grad_norm": 1.7260364294052124, | |
| "learning_rate": 4.876693303395229e-05, | |
| "loss": 0.3641, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 0.9546783625730995, | |
| "grad_norm": 1.7574645280838013, | |
| "learning_rate": 4.875900590376811e-05, | |
| "loss": 0.4648, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 0.956140350877193, | |
| "grad_norm": 1.5437654256820679, | |
| "learning_rate": 4.875105402269835e-05, | |
| "loss": 0.4298, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 0.9576023391812866, | |
| "grad_norm": 2.097038984298706, | |
| "learning_rate": 4.874307739902689e-05, | |
| "loss": 0.643, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 0.9590643274853801, | |
| "grad_norm": 1.84390389919281, | |
| "learning_rate": 4.8735076041063345e-05, | |
| "loss": 0.3644, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 0.9605263157894737, | |
| "grad_norm": 1.9462608098983765, | |
| "learning_rate": 4.872704995714312e-05, | |
| "loss": 0.4172, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 0.9619883040935673, | |
| "grad_norm": 1.7451448440551758, | |
| "learning_rate": 4.871899915562736e-05, | |
| "loss": 0.4669, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 0.9634502923976608, | |
| "grad_norm": 2.386901617050171, | |
| "learning_rate": 4.871092364490297e-05, | |
| "loss": 0.568, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 0.9649122807017544, | |
| "grad_norm": 2.0267467498779297, | |
| "learning_rate": 4.870282343338259e-05, | |
| "loss": 0.4633, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.966374269005848, | |
| "grad_norm": 2.4335813522338867, | |
| "learning_rate": 4.869469852950461e-05, | |
| "loss": 0.6197, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 0.9678362573099415, | |
| "grad_norm": 2.3473143577575684, | |
| "learning_rate": 4.868654894173311e-05, | |
| "loss": 0.4807, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 0.9692982456140351, | |
| "grad_norm": 2.0476667881011963, | |
| "learning_rate": 4.8678374678557905e-05, | |
| "loss": 0.4589, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 0.9707602339181286, | |
| "grad_norm": 2.15574312210083, | |
| "learning_rate": 4.867017574849452e-05, | |
| "loss": 0.5197, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 0.9722222222222222, | |
| "grad_norm": 2.1562066078186035, | |
| "learning_rate": 4.866195216008418e-05, | |
| "loss": 0.5114, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 0.9736842105263158, | |
| "grad_norm": 1.9275619983673096, | |
| "learning_rate": 4.8653703921893766e-05, | |
| "loss": 0.4302, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 0.9751461988304093, | |
| "grad_norm": 1.733986735343933, | |
| "learning_rate": 4.864543104251587e-05, | |
| "loss": 0.3591, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 0.9766081871345029, | |
| "grad_norm": 1.9198062419891357, | |
| "learning_rate": 4.8637133530568745e-05, | |
| "loss": 0.3767, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 0.9780701754385965, | |
| "grad_norm": 1.7654181718826294, | |
| "learning_rate": 4.862881139469631e-05, | |
| "loss": 0.4666, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 0.97953216374269, | |
| "grad_norm": 2.2004752159118652, | |
| "learning_rate": 4.8620464643568106e-05, | |
| "loss": 0.5103, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.9809941520467836, | |
| "grad_norm": 1.6918439865112305, | |
| "learning_rate": 4.861209328587937e-05, | |
| "loss": 0.3815, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 0.9824561403508771, | |
| "grad_norm": 1.6210284233093262, | |
| "learning_rate": 4.860369733035092e-05, | |
| "loss": 0.4294, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 0.9839181286549707, | |
| "grad_norm": 3.5865211486816406, | |
| "learning_rate": 4.8595276785729236e-05, | |
| "loss": 0.796, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 0.9853801169590644, | |
| "grad_norm": 1.6121916770935059, | |
| "learning_rate": 4.858683166078639e-05, | |
| "loss": 0.3539, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 0.9868421052631579, | |
| "grad_norm": 1.5387506484985352, | |
| "learning_rate": 4.8578361964320084e-05, | |
| "loss": 0.3536, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 0.9883040935672515, | |
| "grad_norm": 1.8393478393554688, | |
| "learning_rate": 4.856986770515358e-05, | |
| "loss": 0.5108, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 0.9897660818713451, | |
| "grad_norm": 2.026841878890991, | |
| "learning_rate": 4.856134889213578e-05, | |
| "loss": 0.4274, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 0.9912280701754386, | |
| "grad_norm": 1.9634016752243042, | |
| "learning_rate": 4.855280553414111e-05, | |
| "loss": 0.4771, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 0.9926900584795322, | |
| "grad_norm": 2.279492139816284, | |
| "learning_rate": 4.854423764006961e-05, | |
| "loss": 0.4596, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 0.9941520467836257, | |
| "grad_norm": 2.174903392791748, | |
| "learning_rate": 4.853564521884686e-05, | |
| "loss": 0.4137, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.9956140350877193, | |
| "grad_norm": 1.97664213180542, | |
| "learning_rate": 4.852702827942398e-05, | |
| "loss": 0.4376, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 0.9970760233918129, | |
| "grad_norm": 2.298906087875366, | |
| "learning_rate": 4.851838683077766e-05, | |
| "loss": 0.4959, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 0.9985380116959064, | |
| "grad_norm": 1.6730514764785767, | |
| "learning_rate": 4.85097208819101e-05, | |
| "loss": 0.3965, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 1.7670232057571411, | |
| "learning_rate": 4.8501030441849035e-05, | |
| "loss": 0.373, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "eval_loss": 0.4992051124572754, | |
| "eval_runtime": 38.9644, | |
| "eval_samples_per_second": 3.721, | |
| "eval_steps_per_second": 3.721, | |
| "step": 684 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 3420, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 5, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 1.6187979319406592e+16, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
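
Since the file above follows the standard Hugging Face `Trainer` state format (`log_history` holding per-step training records and end-of-epoch eval records, plus `best_metric`/`best_model_checkpoint` at the top level), a short stdlib-only sketch can pull the headline numbers out of it. The filename `trainer_state.json` and the 50-step smoothing window are assumptions for illustration, not part of the original run:

```python
# Minimal sketch: summarize a Hugging Face trainer_state.json.
# Assumptions: the JSON above is saved as "trainer_state.json" and uses the
# field names shown in it (loss / grad_norm / step / eval_loss / best_metric).
import json
from statistics import mean

with open("trainer_state.json") as f:
    state = json.load(f)

# Training records carry "loss"; eval records carry "eval_loss" instead,
# so the two kinds of log entry can be split by key.
train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_loss" in e]

# A running mean over the last 50 steps smooths the per-step noise
# (train_batch_size is 1 here, so individual losses vary widely).
tail = train_logs[-50:]
spike = max(train_logs, key=lambda e: e["grad_norm"])

print(f"steps logged:       {len(train_logs)}")
print(f"mean loss, last 50: {mean(e['loss'] for e in tail):.4f}")
print(f"max grad_norm:      {spike['grad_norm']:.2f} (step {spike['step']})")
for e in eval_logs:
    print(f"eval_loss @ step {e['step']}: {e['eval_loss']:.4f}")
print(f"best checkpoint:    {state['best_model_checkpoint']} "
      f"(metric {state['best_metric']:.4f})")
```

On this log the single eval pass at step 684 (0.4992) matches `best_metric` and `best_model_checkpoint` in the header, and within the window shown here the gradient-norm outliers at steps 633 (11.02) and 673 (3.59) stand out clearly against the ~1.5–2.5 baseline.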