| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 5.913043478260869, |
| "eval_steps": 500, |
| "global_step": 366, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.016304347826086956, |
| "grad_norm": 45.78702163696289, |
| "learning_rate": 5.0000000000000004e-08, |
| "loss": 4.8845, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.03260869565217391, |
| "grad_norm": 45.26464080810547, |
| "learning_rate": 1.0000000000000001e-07, |
| "loss": 4.8307, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.04891304347826087, |
| "grad_norm": 45.96015167236328, |
| "learning_rate": 1.5000000000000002e-07, |
| "loss": 4.8322, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.06521739130434782, |
| "grad_norm": 45.86792755126953, |
| "learning_rate": 2.0000000000000002e-07, |
| "loss": 4.8841, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.08152173913043478, |
| "grad_norm": 44.19926071166992, |
| "learning_rate": 2.5000000000000004e-07, |
| "loss": 4.8013, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.09782608695652174, |
| "grad_norm": 45.76038360595703, |
| "learning_rate": 3.0000000000000004e-07, |
| "loss": 4.8112, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.11413043478260869, |
| "grad_norm": 45.73483657836914, |
| "learning_rate": 3.5000000000000004e-07, |
| "loss": 4.8235, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.13043478260869565, |
| "grad_norm": 46.2863655090332, |
| "learning_rate": 4.0000000000000003e-07, |
| "loss": 4.895, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.14673913043478262, |
| "grad_norm": 44.06720733642578, |
| "learning_rate": 4.5000000000000003e-07, |
| "loss": 4.7696, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.16304347826086957, |
| "grad_norm": 43.8859977722168, |
| "learning_rate": 5.000000000000001e-07, |
| "loss": 4.7098, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.1793478260869565, |
| "grad_norm": 44.059043884277344, |
| "learning_rate": 5.5e-07, |
| "loss": 4.7519, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.1956521739130435, |
| "grad_norm": 43.980201721191406, |
| "learning_rate": 6.000000000000001e-07, |
| "loss": 4.7465, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.21195652173913043, |
| "grad_norm": 40.88735580444336, |
| "learning_rate": 6.5e-07, |
| "loss": 4.5416, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.22826086956521738, |
| "grad_norm": 39.40138244628906, |
| "learning_rate": 7.000000000000001e-07, |
| "loss": 4.5273, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.24456521739130435, |
| "grad_norm": 37.21616744995117, |
| "learning_rate": 7.5e-07, |
| "loss": 4.4206, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.2608695652173913, |
| "grad_norm": 35.060447692871094, |
| "learning_rate": 8.000000000000001e-07, |
| "loss": 4.3318, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.27717391304347827, |
| "grad_norm": 30.492183685302734, |
| "learning_rate": 8.500000000000001e-07, |
| "loss": 4.0841, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.29347826086956524, |
| "grad_norm": 28.49239730834961, |
| "learning_rate": 9.000000000000001e-07, |
| "loss": 4.0979, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.30978260869565216, |
| "grad_norm": 24.898632049560547, |
| "learning_rate": 9.500000000000001e-07, |
| "loss": 3.8752, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.32608695652173914, |
| "grad_norm": 22.521434783935547, |
| "learning_rate": 1.0000000000000002e-06, |
| "loss": 3.77, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.3423913043478261, |
| "grad_norm": 20.311031341552734, |
| "learning_rate": 1.0500000000000001e-06, |
| "loss": 3.5973, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.358695652173913, |
| "grad_norm": 19.707839965820312, |
| "learning_rate": 1.1e-06, |
| "loss": 3.6102, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.375, |
| "grad_norm": 18.431489944458008, |
| "learning_rate": 1.1500000000000002e-06, |
| "loss": 3.4147, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.391304347826087, |
| "grad_norm": 17.78904151916504, |
| "learning_rate": 1.2000000000000002e-06, |
| "loss": 3.2665, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.4076086956521739, |
| "grad_norm": 18.014617919921875, |
| "learning_rate": 1.25e-06, |
| "loss": 3.1941, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.42391304347826086, |
| "grad_norm": 17.8990478515625, |
| "learning_rate": 1.3e-06, |
| "loss": 3.0377, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.44021739130434784, |
| "grad_norm": 18.60847282409668, |
| "learning_rate": 1.3500000000000002e-06, |
| "loss": 2.8489, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.45652173913043476, |
| "grad_norm": 18.562536239624023, |
| "learning_rate": 1.4000000000000001e-06, |
| "loss": 2.6229, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.47282608695652173, |
| "grad_norm": 19.00884246826172, |
| "learning_rate": 1.45e-06, |
| "loss": 2.4521, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.4891304347826087, |
| "grad_norm": 20.419940948486328, |
| "learning_rate": 1.5e-06, |
| "loss": 2.3907, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.5054347826086957, |
| "grad_norm": 21.181713104248047, |
| "learning_rate": 1.5500000000000002e-06, |
| "loss": 2.2032, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.5217391304347826, |
| "grad_norm": 20.592416763305664, |
| "learning_rate": 1.6000000000000001e-06, |
| "loss": 2.004, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.5380434782608695, |
| "grad_norm": 20.40924835205078, |
| "learning_rate": 1.6500000000000003e-06, |
| "loss": 1.8373, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.5543478260869565, |
| "grad_norm": 18.38524627685547, |
| "learning_rate": 1.7000000000000002e-06, |
| "loss": 1.6218, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.5706521739130435, |
| "grad_norm": 18.922391891479492, |
| "learning_rate": 1.75e-06, |
| "loss": 1.4708, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.5869565217391305, |
| "grad_norm": 20.200698852539062, |
| "learning_rate": 1.8000000000000001e-06, |
| "loss": 1.3044, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.6032608695652174, |
| "grad_norm": 20.70162582397461, |
| "learning_rate": 1.85e-06, |
| "loss": 1.1445, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.6195652173913043, |
| "grad_norm": 18.18869400024414, |
| "learning_rate": 1.9000000000000002e-06, |
| "loss": 0.9504, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.6358695652173914, |
| "grad_norm": 15.211593627929688, |
| "learning_rate": 1.9500000000000004e-06, |
| "loss": 0.8083, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.6521739130434783, |
| "grad_norm": 12.71890640258789, |
| "learning_rate": 2.0000000000000003e-06, |
| "loss": 0.6981, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.6684782608695652, |
| "grad_norm": 10.869053840637207, |
| "learning_rate": 2.05e-06, |
| "loss": 0.6018, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.6847826086956522, |
| "grad_norm": 9.5787353515625, |
| "learning_rate": 2.1000000000000002e-06, |
| "loss": 0.4899, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.7010869565217391, |
| "grad_norm": 9.319293975830078, |
| "learning_rate": 2.15e-06, |
| "loss": 0.3986, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.717391304347826, |
| "grad_norm": 9.404044151306152, |
| "learning_rate": 2.2e-06, |
| "loss": 0.3084, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.7336956521739131, |
| "grad_norm": 7.995025634765625, |
| "learning_rate": 2.25e-06, |
| "loss": 0.2373, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.75, |
| "grad_norm": 4.505397319793701, |
| "learning_rate": 2.3000000000000004e-06, |
| "loss": 0.1527, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.7663043478260869, |
| "grad_norm": 2.5063579082489014, |
| "learning_rate": 2.35e-06, |
| "loss": 0.1097, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.782608695652174, |
| "grad_norm": 1.5846028327941895, |
| "learning_rate": 2.4000000000000003e-06, |
| "loss": 0.102, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.7989130434782609, |
| "grad_norm": 1.1286852359771729, |
| "learning_rate": 2.4500000000000003e-06, |
| "loss": 0.093, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.8152173913043478, |
| "grad_norm": 1.0311343669891357, |
| "learning_rate": 2.5e-06, |
| "loss": 0.0778, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.8315217391304348, |
| "grad_norm": 0.6458576917648315, |
| "learning_rate": 2.55e-06, |
| "loss": 0.0883, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.8478260869565217, |
| "grad_norm": 0.727554202079773, |
| "learning_rate": 2.6e-06, |
| "loss": 0.0852, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.8641304347826086, |
| "grad_norm": 0.619137167930603, |
| "learning_rate": 2.6500000000000005e-06, |
| "loss": 0.0697, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.8804347826086957, |
| "grad_norm": 0.38241881132125854, |
| "learning_rate": 2.7000000000000004e-06, |
| "loss": 0.0665, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.8967391304347826, |
| "grad_norm": 0.541621744632721, |
| "learning_rate": 2.7500000000000004e-06, |
| "loss": 0.0658, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.9130434782608695, |
| "grad_norm": 0.3869657516479492, |
| "learning_rate": 2.8000000000000003e-06, |
| "loss": 0.0728, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.9293478260869565, |
| "grad_norm": 0.3768727481365204, |
| "learning_rate": 2.85e-06, |
| "loss": 0.0741, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.9456521739130435, |
| "grad_norm": 0.31400591135025024, |
| "learning_rate": 2.9e-06, |
| "loss": 0.0682, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.9619565217391305, |
| "grad_norm": 0.3604981303215027, |
| "learning_rate": 2.95e-06, |
| "loss": 0.0652, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.9782608695652174, |
| "grad_norm": 0.4383264482021332, |
| "learning_rate": 3e-06, |
| "loss": 0.0703, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.9945652173913043, |
| "grad_norm": 0.310332328081131, |
| "learning_rate": 3.05e-06, |
| "loss": 0.0664, |
| "step": 61 |
| }, |
| { |
| "epoch": 1.0, |
| "grad_norm": 0.310332328081131, |
| "learning_rate": 3.1000000000000004e-06, |
| "loss": 0.0509, |
| "step": 62 |
| }, |
| { |
| "epoch": 1.016304347826087, |
| "grad_norm": 0.665212869644165, |
| "learning_rate": 3.1500000000000003e-06, |
| "loss": 0.0633, |
| "step": 63 |
| }, |
| { |
| "epoch": 1.0326086956521738, |
| "grad_norm": 0.3108278512954712, |
| "learning_rate": 3.2000000000000003e-06, |
| "loss": 0.0648, |
| "step": 64 |
| }, |
| { |
| "epoch": 1.048913043478261, |
| "grad_norm": 0.5986258387565613, |
| "learning_rate": 3.2500000000000002e-06, |
| "loss": 0.0769, |
| "step": 65 |
| }, |
| { |
| "epoch": 1.065217391304348, |
| "grad_norm": 0.6987417936325073, |
| "learning_rate": 3.3000000000000006e-06, |
| "loss": 0.0728, |
| "step": 66 |
| }, |
| { |
| "epoch": 1.0815217391304348, |
| "grad_norm": 0.36069774627685547, |
| "learning_rate": 3.3500000000000005e-06, |
| "loss": 0.0647, |
| "step": 67 |
| }, |
| { |
| "epoch": 1.0978260869565217, |
| "grad_norm": 0.37604954838752747, |
| "learning_rate": 3.4000000000000005e-06, |
| "loss": 0.0571, |
| "step": 68 |
| }, |
| { |
| "epoch": 1.1141304347826086, |
| "grad_norm": 0.2985791862010956, |
| "learning_rate": 3.45e-06, |
| "loss": 0.0629, |
| "step": 69 |
| }, |
| { |
| "epoch": 1.1304347826086956, |
| "grad_norm": 0.3454388380050659, |
| "learning_rate": 3.5e-06, |
| "loss": 0.0644, |
| "step": 70 |
| }, |
| { |
| "epoch": 1.1467391304347827, |
| "grad_norm": 0.3371462821960449, |
| "learning_rate": 3.5500000000000003e-06, |
| "loss": 0.0645, |
| "step": 71 |
| }, |
| { |
| "epoch": 1.1630434782608696, |
| "grad_norm": 0.27834194898605347, |
| "learning_rate": 3.6000000000000003e-06, |
| "loss": 0.0678, |
| "step": 72 |
| }, |
| { |
| "epoch": 1.1793478260869565, |
| "grad_norm": 0.26285555958747864, |
| "learning_rate": 3.65e-06, |
| "loss": 0.0586, |
| "step": 73 |
| }, |
| { |
| "epoch": 1.1956521739130435, |
| "grad_norm": 0.34152188897132874, |
| "learning_rate": 3.7e-06, |
| "loss": 0.061, |
| "step": 74 |
| }, |
| { |
| "epoch": 1.2119565217391304, |
| "grad_norm": 0.2939279079437256, |
| "learning_rate": 3.7500000000000005e-06, |
| "loss": 0.0643, |
| "step": 75 |
| }, |
| { |
| "epoch": 1.2282608695652173, |
| "grad_norm": 0.395220547914505, |
| "learning_rate": 3.8000000000000005e-06, |
| "loss": 0.0644, |
| "step": 76 |
| }, |
| { |
| "epoch": 1.2445652173913044, |
| "grad_norm": 0.29400259256362915, |
| "learning_rate": 3.85e-06, |
| "loss": 0.0562, |
| "step": 77 |
| }, |
| { |
| "epoch": 1.2608695652173914, |
| "grad_norm": 0.25938117504119873, |
| "learning_rate": 3.900000000000001e-06, |
| "loss": 0.0658, |
| "step": 78 |
| }, |
| { |
| "epoch": 1.2771739130434783, |
| "grad_norm": 0.36941587924957275, |
| "learning_rate": 3.95e-06, |
| "loss": 0.0678, |
| "step": 79 |
| }, |
| { |
| "epoch": 1.2934782608695652, |
| "grad_norm": 0.26572781801223755, |
| "learning_rate": 4.000000000000001e-06, |
| "loss": 0.0588, |
| "step": 80 |
| }, |
| { |
| "epoch": 1.309782608695652, |
| "grad_norm": 0.22964634001255035, |
| "learning_rate": 4.05e-06, |
| "loss": 0.0612, |
| "step": 81 |
| }, |
| { |
| "epoch": 1.3260869565217392, |
| "grad_norm": 0.24455289542675018, |
| "learning_rate": 4.1e-06, |
| "loss": 0.059, |
| "step": 82 |
| }, |
| { |
| "epoch": 1.3423913043478262, |
| "grad_norm": 0.3925253748893738, |
| "learning_rate": 4.15e-06, |
| "loss": 0.0648, |
| "step": 83 |
| }, |
| { |
| "epoch": 1.358695652173913, |
| "grad_norm": 0.24822917580604553, |
| "learning_rate": 4.2000000000000004e-06, |
| "loss": 0.0618, |
| "step": 84 |
| }, |
| { |
| "epoch": 1.375, |
| "grad_norm": 0.2522635757923126, |
| "learning_rate": 4.25e-06, |
| "loss": 0.0568, |
| "step": 85 |
| }, |
| { |
| "epoch": 1.391304347826087, |
| "grad_norm": 0.2465311586856842, |
| "learning_rate": 4.3e-06, |
| "loss": 0.0613, |
| "step": 86 |
| }, |
| { |
| "epoch": 1.4076086956521738, |
| "grad_norm": 0.2514893412590027, |
| "learning_rate": 4.350000000000001e-06, |
| "loss": 0.0574, |
| "step": 87 |
| }, |
| { |
| "epoch": 1.4239130434782608, |
| "grad_norm": 0.2320777177810669, |
| "learning_rate": 4.4e-06, |
| "loss": 0.0502, |
| "step": 88 |
| }, |
| { |
| "epoch": 1.440217391304348, |
| "grad_norm": 0.2494516372680664, |
| "learning_rate": 4.450000000000001e-06, |
| "loss": 0.0685, |
| "step": 89 |
| }, |
| { |
| "epoch": 1.4565217391304348, |
| "grad_norm": 0.244571715593338, |
| "learning_rate": 4.5e-06, |
| "loss": 0.0588, |
| "step": 90 |
| }, |
| { |
| "epoch": 1.4728260869565217, |
| "grad_norm": 0.22765810787677765, |
| "learning_rate": 4.5500000000000005e-06, |
| "loss": 0.0591, |
| "step": 91 |
| }, |
| { |
| "epoch": 1.4891304347826086, |
| "grad_norm": 0.2349582016468048, |
| "learning_rate": 4.600000000000001e-06, |
| "loss": 0.0572, |
| "step": 92 |
| }, |
| { |
| "epoch": 1.5054347826086958, |
| "grad_norm": 0.228654682636261, |
| "learning_rate": 4.65e-06, |
| "loss": 0.0593, |
| "step": 93 |
| }, |
| { |
| "epoch": 1.5217391304347827, |
| "grad_norm": 0.3090372681617737, |
| "learning_rate": 4.7e-06, |
| "loss": 0.0592, |
| "step": 94 |
| }, |
| { |
| "epoch": 1.5380434782608696, |
| "grad_norm": 0.2116968333721161, |
| "learning_rate": 4.75e-06, |
| "loss": 0.0575, |
| "step": 95 |
| }, |
| { |
| "epoch": 1.5543478260869565, |
| "grad_norm": 0.32294484972953796, |
| "learning_rate": 4.800000000000001e-06, |
| "loss": 0.062, |
| "step": 96 |
| }, |
| { |
| "epoch": 1.5706521739130435, |
| "grad_norm": 0.35240596532821655, |
| "learning_rate": 4.85e-06, |
| "loss": 0.0561, |
| "step": 97 |
| }, |
| { |
| "epoch": 1.5869565217391304, |
| "grad_norm": 0.24272935092449188, |
| "learning_rate": 4.9000000000000005e-06, |
| "loss": 0.0591, |
| "step": 98 |
| }, |
| { |
| "epoch": 1.6032608695652173, |
| "grad_norm": 0.26952850818634033, |
| "learning_rate": 4.95e-06, |
| "loss": 0.0557, |
| "step": 99 |
| }, |
| { |
| "epoch": 1.6195652173913042, |
| "grad_norm": 0.2775789201259613, |
| "learning_rate": 5e-06, |
| "loss": 0.0624, |
| "step": 100 |
| }, |
| { |
| "epoch": 1.6358695652173914, |
| "grad_norm": 0.2162039428949356, |
| "learning_rate": 4.999825642177387e-06, |
| "loss": 0.0582, |
| "step": 101 |
| }, |
| { |
| "epoch": 1.6521739130434783, |
| "grad_norm": 0.38436195254325867, |
| "learning_rate": 4.999302593030069e-06, |
| "loss": 0.0618, |
| "step": 102 |
| }, |
| { |
| "epoch": 1.6684782608695652, |
| "grad_norm": 0.2860764265060425, |
| "learning_rate": 4.998430925516213e-06, |
| "loss": 0.0637, |
| "step": 103 |
| }, |
| { |
| "epoch": 1.6847826086956523, |
| "grad_norm": 0.2180222123861313, |
| "learning_rate": 4.99721076122146e-06, |
| "loss": 0.0565, |
| "step": 104 |
| }, |
| { |
| "epoch": 1.7010869565217392, |
| "grad_norm": 0.30026087164878845, |
| "learning_rate": 4.995642270341961e-06, |
| "loss": 0.0615, |
| "step": 105 |
| }, |
| { |
| "epoch": 1.7173913043478262, |
| "grad_norm": 0.27657821774482727, |
| "learning_rate": 4.99372567166064e-06, |
| "loss": 0.0661, |
| "step": 106 |
| }, |
| { |
| "epoch": 1.733695652173913, |
| "grad_norm": 0.23324432969093323, |
| "learning_rate": 4.991461232516675e-06, |
| "loss": 0.0563, |
| "step": 107 |
| }, |
| { |
| "epoch": 1.75, |
| "grad_norm": 0.2670609951019287, |
| "learning_rate": 4.98884926876821e-06, |
| "loss": 0.0671, |
| "step": 108 |
| }, |
| { |
| "epoch": 1.766304347826087, |
| "grad_norm": 0.2405894696712494, |
| "learning_rate": 4.9858901447482924e-06, |
| "loss": 0.0571, |
| "step": 109 |
| }, |
| { |
| "epoch": 1.7826086956521738, |
| "grad_norm": 0.23271754384040833, |
| "learning_rate": 4.982584273214061e-06, |
| "loss": 0.0598, |
| "step": 110 |
| }, |
| { |
| "epoch": 1.7989130434782608, |
| "grad_norm": 0.21662941575050354, |
| "learning_rate": 4.978932115289165e-06, |
| "loss": 0.0562, |
| "step": 111 |
| }, |
| { |
| "epoch": 1.8152173913043477, |
| "grad_norm": 0.27656564116477966, |
| "learning_rate": 4.974934180399447e-06, |
| "loss": 0.0531, |
| "step": 112 |
| }, |
| { |
| "epoch": 1.8315217391304348, |
| "grad_norm": 0.3287908434867859, |
| "learning_rate": 4.970591026201884e-06, |
| "loss": 0.0522, |
| "step": 113 |
| }, |
| { |
| "epoch": 1.8478260869565217, |
| "grad_norm": 0.1879669576883316, |
| "learning_rate": 4.965903258506806e-06, |
| "loss": 0.0625, |
| "step": 114 |
| }, |
| { |
| "epoch": 1.8641304347826086, |
| "grad_norm": 0.2798942029476166, |
| "learning_rate": 4.9608715311933865e-06, |
| "loss": 0.0546, |
| "step": 115 |
| }, |
| { |
| "epoch": 1.8804347826086958, |
| "grad_norm": 0.31130167841911316, |
| "learning_rate": 4.955496546118439e-06, |
| "loss": 0.0576, |
| "step": 116 |
| }, |
| { |
| "epoch": 1.8967391304347827, |
| "grad_norm": 0.23934200406074524, |
| "learning_rate": 4.949779053018519e-06, |
| "loss": 0.0522, |
| "step": 117 |
| }, |
| { |
| "epoch": 1.9130434782608696, |
| "grad_norm": 0.2684226632118225, |
| "learning_rate": 4.943719849405347e-06, |
| "loss": 0.0596, |
| "step": 118 |
| }, |
| { |
| "epoch": 1.9293478260869565, |
| "grad_norm": 0.31276896595954895, |
| "learning_rate": 4.937319780454559e-06, |
| "loss": 0.0609, |
| "step": 119 |
| }, |
| { |
| "epoch": 1.9456521739130435, |
| "grad_norm": 0.2928497791290283, |
| "learning_rate": 4.930579738887827e-06, |
| "loss": 0.0515, |
| "step": 120 |
| }, |
| { |
| "epoch": 1.9619565217391304, |
| "grad_norm": 0.25487199425697327, |
| "learning_rate": 4.923500664848327e-06, |
| "loss": 0.0565, |
| "step": 121 |
| }, |
| { |
| "epoch": 1.9782608695652173, |
| "grad_norm": 0.19076956808567047, |
| "learning_rate": 4.9160835457696075e-06, |
| "loss": 0.0541, |
| "step": 122 |
| }, |
| { |
| "epoch": 1.9945652173913042, |
| "grad_norm": 0.36202409863471985, |
| "learning_rate": 4.9083294162378545e-06, |
| "loss": 0.0572, |
| "step": 123 |
| }, |
| { |
| "epoch": 2.0, |
| "grad_norm": 0.36202409863471985, |
| "learning_rate": 4.900239357847582e-06, |
| "loss": 0.0578, |
| "step": 124 |
| }, |
| { |
| "epoch": 2.016304347826087, |
| "grad_norm": 0.5418670177459717, |
| "learning_rate": 4.891814499050762e-06, |
| "loss": 0.0504, |
| "step": 125 |
| }, |
| { |
| "epoch": 2.032608695652174, |
| "grad_norm": 0.24317026138305664, |
| "learning_rate": 4.883056014999423e-06, |
| "loss": 0.0557, |
| "step": 126 |
| }, |
| { |
| "epoch": 2.0489130434782608, |
| "grad_norm": 0.23591496050357819, |
| "learning_rate": 4.873965127381734e-06, |
| "loss": 0.0567, |
| "step": 127 |
| }, |
| { |
| "epoch": 2.0652173913043477, |
| "grad_norm": 0.26839157938957214, |
| "learning_rate": 4.864543104251587e-06, |
| "loss": 0.0489, |
| "step": 128 |
| }, |
| { |
| "epoch": 2.0815217391304346, |
| "grad_norm": 0.23912706971168518, |
| "learning_rate": 4.854791259851735e-06, |
| "loss": 0.0519, |
| "step": 129 |
| }, |
| { |
| "epoch": 2.097826086956522, |
| "grad_norm": 0.22615790367126465, |
| "learning_rate": 4.844710954430464e-06, |
| "loss": 0.0533, |
| "step": 130 |
| }, |
| { |
| "epoch": 2.114130434782609, |
| "grad_norm": 0.18881212174892426, |
| "learning_rate": 4.834303594051854e-06, |
| "loss": 0.0509, |
| "step": 131 |
| }, |
| { |
| "epoch": 2.130434782608696, |
| "grad_norm": 0.20288890600204468, |
| "learning_rate": 4.823570630399665e-06, |
| "loss": 0.0515, |
| "step": 132 |
| }, |
| { |
| "epoch": 2.1467391304347827, |
| "grad_norm": 0.2495969533920288, |
| "learning_rate": 4.812513560574832e-06, |
| "loss": 0.0593, |
| "step": 133 |
| }, |
| { |
| "epoch": 2.1630434782608696, |
| "grad_norm": 0.25076112151145935, |
| "learning_rate": 4.8011339268866505e-06, |
| "loss": 0.0546, |
| "step": 134 |
| }, |
| { |
| "epoch": 2.1793478260869565, |
| "grad_norm": 0.26761433482170105, |
| "learning_rate": 4.789433316637644e-06, |
| "loss": 0.0494, |
| "step": 135 |
| }, |
| { |
| "epoch": 2.1956521739130435, |
| "grad_norm": 0.22908158600330353, |
| "learning_rate": 4.777413361902152e-06, |
| "loss": 0.0556, |
| "step": 136 |
| }, |
| { |
| "epoch": 2.2119565217391304, |
| "grad_norm": 0.24989894032478333, |
| "learning_rate": 4.765075739298683e-06, |
| "loss": 0.0457, |
| "step": 137 |
| }, |
| { |
| "epoch": 2.2282608695652173, |
| "grad_norm": 0.19970546662807465, |
| "learning_rate": 4.752422169756048e-06, |
| "loss": 0.0483, |
| "step": 138 |
| }, |
| { |
| "epoch": 2.244565217391304, |
| "grad_norm": 0.2547377347946167, |
| "learning_rate": 4.739454418273314e-06, |
| "loss": 0.0562, |
| "step": 139 |
| }, |
| { |
| "epoch": 2.260869565217391, |
| "grad_norm": 0.30050888657569885, |
| "learning_rate": 4.726174293673612e-06, |
| "loss": 0.0474, |
| "step": 140 |
| }, |
| { |
| "epoch": 2.2771739130434785, |
| "grad_norm": 0.20995593070983887, |
| "learning_rate": 4.712583648351827e-06, |
| "loss": 0.0486, |
| "step": 141 |
| }, |
| { |
| "epoch": 2.2934782608695654, |
| "grad_norm": 0.24649159610271454, |
| "learning_rate": 4.698684378016223e-06, |
| "loss": 0.0521, |
| "step": 142 |
| }, |
| { |
| "epoch": 2.3097826086956523, |
| "grad_norm": 0.23422546684741974, |
| "learning_rate": 4.684478421424007e-06, |
| "loss": 0.0521, |
| "step": 143 |
| }, |
| { |
| "epoch": 2.3260869565217392, |
| "grad_norm": 0.21018289029598236, |
| "learning_rate": 4.669967760110908e-06, |
| "loss": 0.0503, |
| "step": 144 |
| }, |
| { |
| "epoch": 2.342391304347826, |
| "grad_norm": 0.20515775680541992, |
| "learning_rate": 4.655154418114774e-06, |
| "loss": 0.0545, |
| "step": 145 |
| }, |
| { |
| "epoch": 2.358695652173913, |
| "grad_norm": 0.2237028032541275, |
| "learning_rate": 4.6400404616932505e-06, |
| "loss": 0.052, |
| "step": 146 |
| }, |
| { |
| "epoch": 2.375, |
| "grad_norm": 0.24619129300117493, |
| "learning_rate": 4.624627999035564e-06, |
| "loss": 0.059, |
| "step": 147 |
| }, |
| { |
| "epoch": 2.391304347826087, |
| "grad_norm": 0.20323814451694489, |
| "learning_rate": 4.608919179968457e-06, |
| "loss": 0.0534, |
| "step": 148 |
| }, |
| { |
| "epoch": 2.407608695652174, |
| "grad_norm": 0.21665000915527344, |
| "learning_rate": 4.592916195656322e-06, |
| "loss": 0.0505, |
| "step": 149 |
| }, |
| { |
| "epoch": 2.4239130434782608, |
| "grad_norm": 0.21209590137004852, |
| "learning_rate": 4.576621278295558e-06, |
| "loss": 0.055, |
| "step": 150 |
| }, |
| { |
| "epoch": 2.4402173913043477, |
| "grad_norm": 0.19232606887817383, |
| "learning_rate": 4.5600367008032135e-06, |
| "loss": 0.0533, |
| "step": 151 |
| }, |
| { |
| "epoch": 2.4565217391304346, |
| "grad_norm": 0.22477513551712036, |
| "learning_rate": 4.543164776499945e-06, |
| "loss": 0.0544, |
| "step": 152 |
| }, |
| { |
| "epoch": 2.4728260869565215, |
| "grad_norm": 0.21310777962207794, |
| "learning_rate": 4.5260078587873416e-06, |
| "loss": 0.0534, |
| "step": 153 |
| }, |
| { |
| "epoch": 2.489130434782609, |
| "grad_norm": 0.2207050621509552, |
| "learning_rate": 4.508568340819654e-06, |
| "loss": 0.0516, |
| "step": 154 |
| }, |
| { |
| "epoch": 2.505434782608696, |
| "grad_norm": 0.22430899739265442, |
| "learning_rate": 4.490848655169986e-06, |
| "loss": 0.0552, |
| "step": 155 |
| }, |
| { |
| "epoch": 2.5217391304347827, |
| "grad_norm": 0.2665068805217743, |
| "learning_rate": 4.472851273490985e-06, |
| "loss": 0.0517, |
| "step": 156 |
| }, |
| { |
| "epoch": 2.5380434782608696, |
| "grad_norm": 0.20904089510440826, |
| "learning_rate": 4.454578706170075e-06, |
| "loss": 0.0477, |
| "step": 157 |
| }, |
| { |
| "epoch": 2.5543478260869565, |
| "grad_norm": 0.21974575519561768, |
| "learning_rate": 4.436033501979299e-06, |
| "loss": 0.0465, |
| "step": 158 |
| }, |
| { |
| "epoch": 2.5706521739130435, |
| "grad_norm": 0.2737596333026886, |
| "learning_rate": 4.417218247719794e-06, |
| "loss": 0.0443, |
| "step": 159 |
| }, |
| { |
| "epoch": 2.5869565217391304, |
| "grad_norm": 0.19713006913661957, |
| "learning_rate": 4.398135567860972e-06, |
| "loss": 0.0565, |
| "step": 160 |
| }, |
| { |
| "epoch": 2.6032608695652173, |
| "grad_norm": 0.24970802664756775, |
| "learning_rate": 4.378788124174441e-06, |
| "loss": 0.0473, |
| "step": 161 |
| }, |
| { |
| "epoch": 2.619565217391304, |
| "grad_norm": 0.20846787095069885, |
| "learning_rate": 4.359178615362725e-06, |
| "loss": 0.0511, |
| "step": 162 |
| }, |
| { |
| "epoch": 2.6358695652173916, |
| "grad_norm": 0.33354219794273376, |
| "learning_rate": 4.33930977668283e-06, |
| "loss": 0.0541, |
| "step": 163 |
| }, |
| { |
| "epoch": 2.6521739130434785, |
| "grad_norm": 0.23585955798625946, |
| "learning_rate": 4.319184379564716e-06, |
| "loss": 0.0534, |
| "step": 164 |
| }, |
| { |
| "epoch": 2.6684782608695654, |
| "grad_norm": 0.26923519372940063, |
| "learning_rate": 4.298805231224721e-06, |
| "loss": 0.052, |
| "step": 165 |
| }, |
| { |
| "epoch": 2.6847826086956523, |
| "grad_norm": 0.20466569066047668, |
| "learning_rate": 4.278175174273989e-06, |
| "loss": 0.0532, |
| "step": 166 |
| }, |
| { |
| "epoch": 2.7010869565217392, |
| "grad_norm": 0.27908194065093994, |
| "learning_rate": 4.257297086321967e-06, |
| "loss": 0.0497, |
| "step": 167 |
| }, |
| { |
| "epoch": 2.717391304347826, |
| "grad_norm": 0.29970937967300415, |
| "learning_rate": 4.236173879575022e-06, |
| "loss": 0.0502, |
| "step": 168 |
| }, |
| { |
| "epoch": 2.733695652173913, |
| "grad_norm": 0.2322796732187271, |
| "learning_rate": 4.2148085004302205e-06, |
| "loss": 0.054, |
| "step": 169 |
| }, |
| { |
| "epoch": 2.75, |
| "grad_norm": 0.1984764039516449, |
| "learning_rate": 4.1932039290643534e-06, |
| "loss": 0.0491, |
| "step": 170 |
| }, |
| { |
| "epoch": 2.766304347826087, |
| "grad_norm": 0.20822829008102417, |
| "learning_rate": 4.1713631790182366e-06, |
| "loss": 0.0555, |
| "step": 171 |
| }, |
| { |
| "epoch": 2.782608695652174, |
| "grad_norm": 0.2537862956523895, |
| "learning_rate": 4.149289296776369e-06, |
| "loss": 0.0479, |
| "step": 172 |
| }, |
| { |
| "epoch": 2.7989130434782608, |
| "grad_norm": 0.20386171340942383, |
| "learning_rate": 4.126985361341984e-06, |
| "loss": 0.049, |
| "step": 173 |
| }, |
| { |
| "epoch": 2.8152173913043477, |
| "grad_norm": 0.28424644470214844, |
| "learning_rate": 4.104454483807579e-06, |
| "loss": 0.0481, |
| "step": 174 |
| }, |
| { |
| "epoch": 2.8315217391304346, |
| "grad_norm": 0.2668056786060333, |
| "learning_rate": 4.0816998069209516e-06, |
| "loss": 0.0528, |
| "step": 175 |
| }, |
| { |
| "epoch": 2.8478260869565215, |
| "grad_norm": 0.2771640419960022, |
| "learning_rate": 4.058724504646834e-06, |
| "loss": 0.0673, |
| "step": 176 |
| }, |
| { |
| "epoch": 2.8641304347826084, |
| "grad_norm": 0.26568353176116943, |
| "learning_rate": 4.0355317817241705e-06, |
| "loss": 0.0438, |
| "step": 177 |
| }, |
| { |
| "epoch": 2.880434782608696, |
| "grad_norm": 0.21058987081050873, |
| "learning_rate": 4.012124873219094e-06, |
| "loss": 0.0499, |
| "step": 178 |
| }, |
| { |
| "epoch": 2.8967391304347827, |
| "grad_norm": 0.27742889523506165, |
| "learning_rate": 3.988507044073687e-06, |
| "loss": 0.047, |
| "step": 179 |
| }, |
| { |
| "epoch": 2.9130434782608696, |
| "grad_norm": 0.21633900701999664, |
| "learning_rate": 3.964681588650562e-06, |
| "loss": 0.0485, |
| "step": 180 |
| }, |
| { |
| "epoch": 2.9293478260869565, |
| "grad_norm": 0.21723268926143646, |
| "learning_rate": 3.940651830273342e-06, |
| "loss": 0.0535, |
| "step": 181 |
| }, |
| { |
| "epoch": 2.9456521739130435, |
| "grad_norm": 0.21648381650447845, |
| "learning_rate": 3.916421120763106e-06, |
| "loss": 0.0469, |
| "step": 182 |
| }, |
| { |
| "epoch": 2.9619565217391304, |
| "grad_norm": 0.2256888449192047, |
| "learning_rate": 3.891992839970855e-06, |
| "loss": 0.0469, |
| "step": 183 |
| }, |
| { |
| "epoch": 2.9782608695652173, |
| "grad_norm": 0.3136005699634552, |
| "learning_rate": 3.8673703953060685e-06, |
| "loss": 0.0517, |
| "step": 184 |
| }, |
| { |
| "epoch": 2.994565217391304, |
| "grad_norm": 0.2358432561159134, |
| "learning_rate": 3.8425572212614155e-06, |
| "loss": 0.0511, |
| "step": 185 |
| }, |
| { |
| "epoch": 3.0, |
| "grad_norm": 0.4989979565143585, |
| "learning_rate": 3.817556778933697e-06, |
| "loss": 0.0502, |
| "step": 186 |
| }, |
| { |
| "epoch": 3.016304347826087, |
| "grad_norm": 0.24074898660182953, |
| "learning_rate": 3.792372555541064e-06, |
| "loss": 0.0446, |
| "step": 187 |
| }, |
| { |
| "epoch": 3.032608695652174, |
| "grad_norm": 0.23359227180480957, |
| "learning_rate": 3.7670080639366e-06, |
| "loss": 0.0444, |
| "step": 188 |
| }, |
| { |
| "epoch": 3.0489130434782608, |
| "grad_norm": 0.21096405386924744, |
| "learning_rate": 3.741466842118327e-06, |
| "loss": 0.0492, |
| "step": 189 |
| }, |
| { |
| "epoch": 3.0652173913043477, |
| "grad_norm": 0.18678264319896698, |
| "learning_rate": 3.7157524527357036e-06, |
| "loss": 0.0458, |
| "step": 190 |
| }, |
| { |
| "epoch": 3.0815217391304346, |
| "grad_norm": 0.21836838126182556, |
| "learning_rate": 3.6898684825926845e-06, |
| "loss": 0.0471, |
| "step": 191 |
| }, |
| { |
| "epoch": 3.097826086956522, |
| "grad_norm": 0.21061812341213226, |
| "learning_rate": 3.663818542147409e-06, |
| "loss": 0.0469, |
| "step": 192 |
| }, |
| { |
| "epoch": 3.114130434782609, |
| "grad_norm": 0.2022838145494461, |
| "learning_rate": 3.6376062650085918e-06, |
| "loss": 0.0457, |
| "step": 193 |
| }, |
| { |
| "epoch": 3.130434782608696, |
| "grad_norm": 0.21767382323741913, |
| "learning_rate": 3.61123530742869e-06, |
| "loss": 0.0468, |
| "step": 194 |
| }, |
| { |
| "epoch": 3.1467391304347827, |
| "grad_norm": 0.2330484390258789, |
| "learning_rate": 3.5847093477938955e-06, |
| "loss": 0.0437, |
| "step": 195 |
| }, |
| { |
| "epoch": 3.1630434782608696, |
| "grad_norm": 0.21127860248088837, |
| "learning_rate": 3.5580320861110627e-06, |
| "loss": 0.0459, |
| "step": 196 |
| }, |
| { |
| "epoch": 3.1793478260869565, |
| "grad_norm": 0.20847401022911072, |
| "learning_rate": 3.5312072434915983e-06, |
| "loss": 0.0451, |
| "step": 197 |
| }, |
| { |
| "epoch": 3.1956521739130435, |
| "grad_norm": 0.20767691731452942, |
| "learning_rate": 3.5042385616324243e-06, |
| "loss": 0.0434, |
| "step": 198 |
| }, |
| { |
| "epoch": 3.2119565217391304, |
| "grad_norm": 0.21209655702114105, |
| "learning_rate": 3.477129802294057e-06, |
| "loss": 0.0418, |
| "step": 199 |
| }, |
| { |
| "epoch": 3.2282608695652173, |
| "grad_norm": 0.21463370323181152, |
| "learning_rate": 3.4498847467759e-06, |
| "loss": 0.0408, |
| "step": 200 |
| }, |
| { |
| "epoch": 3.244565217391304, |
| "grad_norm": 0.19667920470237732, |
| "learning_rate": 3.4225071953887977e-06, |
| "loss": 0.0476, |
| "step": 201 |
| }, |
| { |
| "epoch": 3.260869565217391, |
| "grad_norm": 0.2126225084066391, |
| "learning_rate": 3.3950009669249502e-06, |
| "loss": 0.0438, |
| "step": 202 |
| }, |
| { |
| "epoch": 3.2771739130434785, |
| "grad_norm": 0.21141058206558228, |
| "learning_rate": 3.3673698981252385e-06, |
| "loss": 0.0443, |
| "step": 203 |
| }, |
| { |
| "epoch": 3.2934782608695654, |
| "grad_norm": 0.3029347360134125, |
| "learning_rate": 3.3396178431440572e-06, |
| "loss": 0.0549, |
| "step": 204 |
| }, |
| { |
| "epoch": 3.3097826086956523, |
| "grad_norm": 0.20408247411251068, |
| "learning_rate": 3.3117486730117092e-06, |
| "loss": 0.0441, |
| "step": 205 |
| }, |
| { |
| "epoch": 3.3260869565217392, |
| "grad_norm": 0.19485828280448914, |
| "learning_rate": 3.283766275094454e-06, |
| "loss": 0.0484, |
| "step": 206 |
| }, |
| { |
| "epoch": 3.342391304347826, |
| "grad_norm": 0.23771652579307556, |
| "learning_rate": 3.255674552552267e-06, |
| "loss": 0.0478, |
| "step": 207 |
| }, |
| { |
| "epoch": 3.358695652173913, |
| "grad_norm": 0.21850208938121796, |
| "learning_rate": 3.227477423794412e-06, |
| "loss": 0.0487, |
| "step": 208 |
| }, |
| { |
| "epoch": 3.375, |
| "grad_norm": 0.23703360557556152, |
| "learning_rate": 3.1991788219328657e-06, |
| "loss": 0.0505, |
| "step": 209 |
| }, |
| { |
| "epoch": 3.391304347826087, |
| "grad_norm": 0.21323414146900177, |
| "learning_rate": 3.1707826942337124e-06, |
| "loss": 0.0398, |
| "step": 210 |
| }, |
| { |
| "epoch": 3.407608695652174, |
| "grad_norm": 0.28237420320510864, |
| "learning_rate": 3.142293001566548e-06, |
| "loss": 0.0429, |
| "step": 211 |
| }, |
| { |
| "epoch": 3.4239130434782608, |
| "grad_norm": 0.24118061363697052, |
| "learning_rate": 3.1137137178519983e-06, |
| "loss": 0.0441, |
| "step": 212 |
| }, |
| { |
| "epoch": 3.4402173913043477, |
| "grad_norm": 0.20298974215984344, |
| "learning_rate": 3.085048829507406e-06, |
| "loss": 0.0429, |
| "step": 213 |
| }, |
| { |
| "epoch": 3.4565217391304346, |
| "grad_norm": 0.2397722750902176, |
| "learning_rate": 3.056302334890786e-06, |
| "loss": 0.0487, |
| "step": 214 |
| }, |
| { |
| "epoch": 3.4728260869565215, |
| "grad_norm": 0.24425837397575378, |
| "learning_rate": 3.027478243743106e-06, |
| "loss": 0.0465, |
| "step": 215 |
| }, |
| { |
| "epoch": 3.489130434782609, |
| "grad_norm": 0.2465757578611374, |
| "learning_rate": 2.9985805766289815e-06, |
| "loss": 0.0415, |
| "step": 216 |
| }, |
| { |
| "epoch": 3.505434782608696, |
| "grad_norm": 0.2629953920841217, |
| "learning_rate": 2.9696133643758663e-06, |
| "loss": 0.0456, |
| "step": 217 |
| }, |
| { |
| "epoch": 3.5217391304347827, |
| "grad_norm": 0.23994584381580353, |
| "learning_rate": 2.940580647511805e-06, |
| "loss": 0.0467, |
| "step": 218 |
| }, |
| { |
| "epoch": 3.5380434782608696, |
| "grad_norm": 0.29615387320518494, |
| "learning_rate": 2.911486475701835e-06, |
| "loss": 0.0468, |
| "step": 219 |
| }, |
| { |
| "epoch": 3.5543478260869565, |
| "grad_norm": 0.28419968485832214, |
| "learning_rate": 2.8823349071831154e-06, |
| "loss": 0.0479, |
| "step": 220 |
| }, |
| { |
| "epoch": 3.5706521739130435, |
| "grad_norm": 0.2224627584218979, |
| "learning_rate": 2.853130008198855e-06, |
| "loss": 0.0436, |
| "step": 221 |
| }, |
| { |
| "epoch": 3.5869565217391304, |
| "grad_norm": 0.2682191729545593, |
| "learning_rate": 2.8238758524311316e-06, |
| "loss": 0.0439, |
| "step": 222 |
| }, |
| { |
| "epoch": 3.6032608695652173, |
| "grad_norm": 0.2094065397977829, |
| "learning_rate": 2.7945765204326664e-06, |
| "loss": 0.0472, |
| "step": 223 |
| }, |
| { |
| "epoch": 3.619565217391304, |
| "grad_norm": 0.22561469674110413, |
| "learning_rate": 2.7652360990576457e-06, |
| "loss": 0.0426, |
| "step": 224 |
| }, |
| { |
| "epoch": 3.6358695652173916, |
| "grad_norm": 0.1920589804649353, |
| "learning_rate": 2.735858680891656e-06, |
| "loss": 0.0465, |
| "step": 225 |
| }, |
| { |
| "epoch": 3.6521739130434785, |
| "grad_norm": 0.2274886816740036, |
| "learning_rate": 2.7064483636808314e-06, |
| "loss": 0.0471, |
| "step": 226 |
| }, |
| { |
| "epoch": 3.6684782608695654, |
| "grad_norm": 0.27154678106307983, |
| "learning_rate": 2.677009249760268e-06, |
| "loss": 0.0537, |
| "step": 227 |
| }, |
| { |
| "epoch": 3.6847826086956523, |
| "grad_norm": 0.19993992149829865, |
| "learning_rate": 2.6475454454818072e-06, |
| "loss": 0.0423, |
| "step": 228 |
| }, |
| { |
| "epoch": 3.7010869565217392, |
| "grad_norm": 0.26778337359428406, |
| "learning_rate": 2.6180610606412587e-06, |
| "loss": 0.0502, |
| "step": 229 |
| }, |
| { |
| "epoch": 3.717391304347826, |
| "grad_norm": 0.22767065465450287, |
| "learning_rate": 2.5885602079051354e-06, |
| "loss": 0.0447, |
| "step": 230 |
| }, |
| { |
| "epoch": 3.733695652173913, |
| "grad_norm": 0.2405000478029251, |
| "learning_rate": 2.559047002236995e-06, |
| "loss": 0.0463, |
| "step": 231 |
| }, |
| { |
| "epoch": 3.75, |
| "grad_norm": 0.23877452313899994, |
| "learning_rate": 2.529525560323462e-06, |
| "loss": 0.0472, |
| "step": 232 |
| }, |
| { |
| "epoch": 3.766304347826087, |
| "grad_norm": 0.2590119540691376, |
| "learning_rate": 2.5e-06, |
| "loss": 0.0474, |
| "step": 233 |
| }, |
| { |
| "epoch": 3.782608695652174, |
| "grad_norm": 0.2477138489484787, |
| "learning_rate": 2.470474439676539e-06, |
| "loss": 0.0461, |
| "step": 234 |
| }, |
| { |
| "epoch": 3.7989130434782608, |
| "grad_norm": 0.23399893939495087, |
| "learning_rate": 2.4409529977630052e-06, |
| "loss": 0.0446, |
| "step": 235 |
| }, |
| { |
| "epoch": 3.8152173913043477, |
| "grad_norm": 0.24198178946971893, |
| "learning_rate": 2.411439792094866e-06, |
| "loss": 0.0482, |
| "step": 236 |
| }, |
| { |
| "epoch": 3.8315217391304346, |
| "grad_norm": 0.22425131499767303, |
| "learning_rate": 2.381938939358742e-06, |
| "loss": 0.0477, |
| "step": 237 |
| }, |
| { |
| "epoch": 3.8478260869565215, |
| "grad_norm": 0.21231728792190552, |
| "learning_rate": 2.3524545545181936e-06, |
| "loss": 0.0437, |
| "step": 238 |
| }, |
| { |
| "epoch": 3.8641304347826084, |
| "grad_norm": 0.24554376304149628, |
| "learning_rate": 2.322990750239733e-06, |
| "loss": 0.0408, |
| "step": 239 |
| }, |
| { |
| "epoch": 3.880434782608696, |
| "grad_norm": 0.20181581377983093, |
| "learning_rate": 2.2935516363191695e-06, |
| "loss": 0.0465, |
| "step": 240 |
| }, |
| { |
| "epoch": 3.8967391304347827, |
| "grad_norm": 0.23197294771671295, |
| "learning_rate": 2.2641413191083445e-06, |
| "loss": 0.0437, |
| "step": 241 |
| }, |
| { |
| "epoch": 3.9130434782608696, |
| "grad_norm": 0.18796613812446594, |
| "learning_rate": 2.234763900942355e-06, |
| "loss": 0.0427, |
| "step": 242 |
| }, |
| { |
| "epoch": 3.9293478260869565, |
| "grad_norm": 0.2553354799747467, |
| "learning_rate": 2.2054234795673336e-06, |
| "loss": 0.0526, |
| "step": 243 |
| }, |
| { |
| "epoch": 3.9456521739130435, |
| "grad_norm": 0.20340357720851898, |
| "learning_rate": 2.1761241475688697e-06, |
| "loss": 0.0405, |
| "step": 244 |
| }, |
| { |
| "epoch": 3.9619565217391304, |
| "grad_norm": 0.2950204908847809, |
| "learning_rate": 2.146869991801146e-06, |
| "loss": 0.0469, |
| "step": 245 |
| }, |
| { |
| "epoch": 3.9782608695652173, |
| "grad_norm": 0.226835697889328, |
| "learning_rate": 2.117665092816885e-06, |
| "loss": 0.0489, |
| "step": 246 |
| }, |
| { |
| "epoch": 3.994565217391304, |
| "grad_norm": 0.2127283215522766, |
| "learning_rate": 2.088513524298165e-06, |
| "loss": 0.0444, |
| "step": 247 |
| }, |
| { |
| "epoch": 4.0, |
| "grad_norm": 0.2127283215522766, |
| "learning_rate": 2.059419352488196e-06, |
| "loss": 0.0514, |
| "step": 248 |
| }, |
| { |
| "epoch": 4.016304347826087, |
| "grad_norm": 0.5015725493431091, |
| "learning_rate": 2.030386635624135e-06, |
| "loss": 0.0402, |
| "step": 249 |
| }, |
| { |
| "epoch": 4.032608695652174, |
| "grad_norm": 0.21895018219947815, |
| "learning_rate": 2.0014194233710193e-06, |
| "loss": 0.0427, |
| "step": 250 |
| }, |
| { |
| "epoch": 4.048913043478261, |
| "grad_norm": 0.2607857584953308, |
| "learning_rate": 1.972521756256895e-06, |
| "loss": 0.0352, |
| "step": 251 |
| }, |
| { |
| "epoch": 4.065217391304348, |
| "grad_norm": 0.2097238153219223, |
| "learning_rate": 1.9436976651092143e-06, |
| "loss": 0.0467, |
| "step": 252 |
| }, |
| { |
| "epoch": 4.081521739130435, |
| "grad_norm": 0.21395832300186157, |
| "learning_rate": 1.9149511704925945e-06, |
| "loss": 0.0436, |
| "step": 253 |
| }, |
| { |
| "epoch": 4.0978260869565215, |
| "grad_norm": 0.24933360517024994, |
| "learning_rate": 1.8862862821480023e-06, |
| "loss": 0.0409, |
| "step": 254 |
| }, |
| { |
| "epoch": 4.114130434782608, |
| "grad_norm": 0.24037982523441315, |
| "learning_rate": 1.8577069984334522e-06, |
| "loss": 0.0445, |
| "step": 255 |
| }, |
| { |
| "epoch": 4.130434782608695, |
| "grad_norm": 0.22139646112918854, |
| "learning_rate": 1.829217305766289e-06, |
| "loss": 0.041, |
| "step": 256 |
| }, |
| { |
| "epoch": 4.146739130434782, |
| "grad_norm": 0.23465107381343842, |
| "learning_rate": 1.8008211780671353e-06, |
| "loss": 0.0456, |
| "step": 257 |
| }, |
| { |
| "epoch": 4.163043478260869, |
| "grad_norm": 0.23133054375648499, |
| "learning_rate": 1.772522576205589e-06, |
| "loss": 0.0428, |
| "step": 258 |
| }, |
| { |
| "epoch": 4.179347826086956, |
| "grad_norm": 0.25171801447868347, |
| "learning_rate": 1.7443254474477328e-06, |
| "loss": 0.0387, |
| "step": 259 |
| }, |
| { |
| "epoch": 4.195652173913044, |
| "grad_norm": 0.22239144146442413, |
| "learning_rate": 1.7162337249055478e-06, |
| "loss": 0.0417, |
| "step": 260 |
| }, |
| { |
| "epoch": 4.211956521739131, |
| "grad_norm": 0.24063155055046082, |
| "learning_rate": 1.6882513269882916e-06, |
| "loss": 0.0451, |
| "step": 261 |
| }, |
| { |
| "epoch": 4.228260869565218, |
| "grad_norm": 0.24732705950737, |
| "learning_rate": 1.6603821568559436e-06, |
| "loss": 0.0428, |
| "step": 262 |
| }, |
| { |
| "epoch": 4.244565217391305, |
| "grad_norm": 0.2546471953392029, |
| "learning_rate": 1.6326301018747623e-06, |
| "loss": 0.0378, |
| "step": 263 |
| }, |
| { |
| "epoch": 4.260869565217392, |
| "grad_norm": 0.2471722811460495, |
| "learning_rate": 1.6049990330750508e-06, |
| "loss": 0.0401, |
| "step": 264 |
| }, |
| { |
| "epoch": 4.2771739130434785, |
| "grad_norm": 0.20250661671161652, |
| "learning_rate": 1.5774928046112025e-06, |
| "loss": 0.0404, |
| "step": 265 |
| }, |
| { |
| "epoch": 4.293478260869565, |
| "grad_norm": 0.21679657697677612, |
| "learning_rate": 1.5501152532241003e-06, |
| "loss": 0.0432, |
| "step": 266 |
| }, |
| { |
| "epoch": 4.309782608695652, |
| "grad_norm": 0.25477948784828186, |
| "learning_rate": 1.522870197705943e-06, |
| "loss": 0.0484, |
| "step": 267 |
| }, |
| { |
| "epoch": 4.326086956521739, |
| "grad_norm": 0.23823678493499756, |
| "learning_rate": 1.495761438367577e-06, |
| "loss": 0.038, |
| "step": 268 |
| }, |
| { |
| "epoch": 4.342391304347826, |
| "grad_norm": 0.19264428317546844, |
| "learning_rate": 1.4687927565084023e-06, |
| "loss": 0.0463, |
| "step": 269 |
| }, |
| { |
| "epoch": 4.358695652173913, |
| "grad_norm": 0.2232806533575058, |
| "learning_rate": 1.4419679138889379e-06, |
| "loss": 0.0367, |
| "step": 270 |
| }, |
| { |
| "epoch": 4.375, |
| "grad_norm": 0.227358877658844, |
| "learning_rate": 1.415290652206105e-06, |
| "loss": 0.0393, |
| "step": 271 |
| }, |
| { |
| "epoch": 4.391304347826087, |
| "grad_norm": 0.21266399323940277, |
| "learning_rate": 1.3887646925713116e-06, |
| "loss": 0.0427, |
| "step": 272 |
| }, |
| { |
| "epoch": 4.407608695652174, |
| "grad_norm": 0.219986230134964, |
| "learning_rate": 1.3623937349914093e-06, |
| "loss": 0.0391, |
| "step": 273 |
| }, |
| { |
| "epoch": 4.423913043478261, |
| "grad_norm": 0.20911090075969696, |
| "learning_rate": 1.3361814578525922e-06, |
| "loss": 0.0414, |
| "step": 274 |
| }, |
| { |
| "epoch": 4.440217391304348, |
| "grad_norm": 0.20766796171665192, |
| "learning_rate": 1.3101315174073162e-06, |
| "loss": 0.0427, |
| "step": 275 |
| }, |
| { |
| "epoch": 4.456521739130435, |
| "grad_norm": 0.2213449478149414, |
| "learning_rate": 1.2842475472642969e-06, |
| "loss": 0.0412, |
| "step": 276 |
| }, |
| { |
| "epoch": 4.4728260869565215, |
| "grad_norm": 0.21896588802337646, |
| "learning_rate": 1.258533157881674e-06, |
| "loss": 0.0419, |
| "step": 277 |
| }, |
| { |
| "epoch": 4.489130434782608, |
| "grad_norm": 0.23026618361473083, |
| "learning_rate": 1.2329919360634003e-06, |
| "loss": 0.0391, |
| "step": 278 |
| }, |
| { |
| "epoch": 4.505434782608695, |
| "grad_norm": 0.2366199791431427, |
| "learning_rate": 1.2076274444589361e-06, |
| "loss": 0.0451, |
| "step": 279 |
| }, |
| { |
| "epoch": 4.521739130434782, |
| "grad_norm": 0.21407200396060944, |
| "learning_rate": 1.182443221066303e-06, |
| "loss": 0.0445, |
| "step": 280 |
| }, |
| { |
| "epoch": 4.538043478260869, |
| "grad_norm": 0.22052663564682007, |
| "learning_rate": 1.1574427787385853e-06, |
| "loss": 0.0454, |
| "step": 281 |
| }, |
| { |
| "epoch": 4.554347826086957, |
| "grad_norm": 0.23922421038150787, |
| "learning_rate": 1.1326296046939334e-06, |
| "loss": 0.0386, |
| "step": 282 |
| }, |
| { |
| "epoch": 4.570652173913043, |
| "grad_norm": 0.21938639879226685, |
| "learning_rate": 1.1080071600291453e-06, |
| "loss": 0.0424, |
| "step": 283 |
| }, |
| { |
| "epoch": 4.586956521739131, |
| "grad_norm": 0.23158493638038635, |
| "learning_rate": 1.083578879236895e-06, |
| "loss": 0.0425, |
| "step": 284 |
| }, |
| { |
| "epoch": 4.603260869565218, |
| "grad_norm": 0.22377173602581024, |
| "learning_rate": 1.0593481697266582e-06, |
| "loss": 0.0431, |
| "step": 285 |
| }, |
| { |
| "epoch": 4.619565217391305, |
| "grad_norm": 0.21283438801765442, |
| "learning_rate": 1.0353184113494386e-06, |
| "loss": 0.0383, |
| "step": 286 |
| }, |
| { |
| "epoch": 4.635869565217392, |
| "grad_norm": 0.24626271426677704, |
| "learning_rate": 1.0114929559263122e-06, |
| "loss": 0.04, |
| "step": 287 |
| }, |
| { |
| "epoch": 4.6521739130434785, |
| "grad_norm": 0.23055487871170044, |
| "learning_rate": 9.878751267809069e-07, |
| "loss": 0.0438, |
| "step": 288 |
| }, |
| { |
| "epoch": 4.668478260869565, |
| "grad_norm": 0.21289804577827454, |
| "learning_rate": 9.644682182758305e-07, |
| "loss": 0.044, |
| "step": 289 |
| }, |
| { |
| "epoch": 4.684782608695652, |
| "grad_norm": 0.25633224844932556, |
| "learning_rate": 9.412754953531664e-07, |
| "loss": 0.0387, |
| "step": 290 |
| }, |
| { |
| "epoch": 4.701086956521739, |
| "grad_norm": 0.21574127674102783, |
| "learning_rate": 9.183001930790483e-07, |
| "loss": 0.0408, |
| "step": 291 |
| }, |
| { |
| "epoch": 4.717391304347826, |
| "grad_norm": 0.2526251971721649, |
| "learning_rate": 8.955455161924217e-07, |
| "loss": 0.0431, |
| "step": 292 |
| }, |
| { |
| "epoch": 4.733695652173913, |
| "grad_norm": 0.2219126969575882, |
| "learning_rate": 8.730146386580157e-07, |
| "loss": 0.0382, |
| "step": 293 |
| }, |
| { |
| "epoch": 4.75, |
| "grad_norm": 0.22477059066295624, |
| "learning_rate": 8.507107032236323e-07, |
| "loss": 0.0387, |
| "step": 294 |
| }, |
| { |
| "epoch": 4.766304347826087, |
| "grad_norm": 0.23549960553646088, |
| "learning_rate": 8.286368209817644e-07, |
| "loss": 0.042, |
| "step": 295 |
| }, |
| { |
| "epoch": 4.782608695652174, |
| "grad_norm": 0.24367327988147736, |
| "learning_rate": 8.067960709356479e-07, |
| "loss": 0.0405, |
| "step": 296 |
| }, |
| { |
| "epoch": 4.798913043478261, |
| "grad_norm": 0.25587689876556396, |
| "learning_rate": 7.851914995697801e-07, |
| "loss": 0.0416, |
| "step": 297 |
| }, |
| { |
| "epoch": 4.815217391304348, |
| "grad_norm": 0.270684152841568, |
| "learning_rate": 7.638261204249783e-07, |
| "loss": 0.0394, |
| "step": 298 |
| }, |
| { |
| "epoch": 4.831521739130435, |
| "grad_norm": 0.22876906394958496, |
| "learning_rate": 7.427029136780333e-07, |
| "loss": 0.0369, |
| "step": 299 |
| }, |
| { |
| "epoch": 4.8478260869565215, |
| "grad_norm": 0.29206568002700806, |
| "learning_rate": 7.218248257260127e-07, |
| "loss": 0.0503, |
| "step": 300 |
| }, |
| { |
| "epoch": 4.864130434782608, |
| "grad_norm": 0.22232329845428467, |
| "learning_rate": 7.011947687752804e-07, |
| "loss": 0.0407, |
| "step": 301 |
| }, |
| { |
| "epoch": 4.880434782608695, |
| "grad_norm": 0.2493494153022766, |
| "learning_rate": 6.808156204352845e-07, |
| "loss": 0.0394, |
| "step": 302 |
| }, |
| { |
| "epoch": 4.896739130434782, |
| "grad_norm": 0.20831190049648285, |
| "learning_rate": 6.60690223317171e-07, |
| "loss": 0.0406, |
| "step": 303 |
| }, |
| { |
| "epoch": 4.913043478260869, |
| "grad_norm": 0.22676558792591095, |
| "learning_rate": 6.40821384637276e-07, |
| "loss": 0.0365, |
| "step": 304 |
| }, |
| { |
| "epoch": 4.929347826086957, |
| "grad_norm": 0.2518041133880615, |
| "learning_rate": 6.212118758255595e-07, |
| "loss": 0.0475, |
| "step": 305 |
| }, |
| { |
| "epoch": 4.945652173913043, |
| "grad_norm": 0.23944813013076782, |
| "learning_rate": 6.018644321390288e-07, |
| "loss": 0.042, |
| "step": 306 |
| }, |
| { |
| "epoch": 4.961956521739131, |
| "grad_norm": 0.2410481721162796, |
| "learning_rate": 5.827817522802065e-07, |
| "loss": 0.0389, |
| "step": 307 |
| }, |
| { |
| "epoch": 4.978260869565218, |
| "grad_norm": 0.2525772750377655, |
| "learning_rate": 5.639664980207024e-07, |
| "loss": 0.0406, |
| "step": 308 |
| }, |
| { |
| "epoch": 4.994565217391305, |
| "grad_norm": 0.276832640171051, |
| "learning_rate": 5.454212938299256e-07, |
| "loss": 0.0439, |
| "step": 309 |
| }, |
| { |
| "epoch": 5.0, |
| "grad_norm": 0.276832640171051, |
| "learning_rate": 5.271487265090163e-07, |
| "loss": 0.0354, |
| "step": 310 |
| }, |
| { |
| "epoch": 5.016304347826087, |
| "grad_norm": 0.5199080109596252, |
| "learning_rate": 5.091513448300142e-07, |
| "loss": 0.0407, |
| "step": 311 |
| }, |
| { |
| "epoch": 5.032608695652174, |
| "grad_norm": 0.270175963640213, |
| "learning_rate": 4.914316591803475e-07, |
| "loss": 0.0402, |
| "step": 312 |
| }, |
| { |
| "epoch": 5.048913043478261, |
| "grad_norm": 0.22836650907993317, |
| "learning_rate": 4.739921412126591e-07, |
| "loss": 0.0382, |
| "step": 313 |
| }, |
| { |
| "epoch": 5.065217391304348, |
| "grad_norm": 0.22557897865772247, |
| "learning_rate": 4.5683522350005505e-07, |
| "loss": 0.0388, |
| "step": 314 |
| }, |
| { |
| "epoch": 5.081521739130435, |
| "grad_norm": 0.23105387389659882, |
| "learning_rate": 4.399632991967867e-07, |
| "loss": 0.0362, |
| "step": 315 |
| }, |
| { |
| "epoch": 5.0978260869565215, |
| "grad_norm": 0.26361796259880066, |
| "learning_rate": 4.23378721704443e-07, |
| "loss": 0.0466, |
| "step": 316 |
| }, |
| { |
| "epoch": 5.114130434782608, |
| "grad_norm": 0.2395378202199936, |
| "learning_rate": 4.070838043436787e-07, |
| "loss": 0.041, |
| "step": 317 |
| }, |
| { |
| "epoch": 5.130434782608695, |
| "grad_norm": 0.20131447911262512, |
| "learning_rate": 3.910808200315433e-07, |
| "loss": 0.0388, |
| "step": 318 |
| }, |
| { |
| "epoch": 5.146739130434782, |
| "grad_norm": 0.2295176386833191, |
| "learning_rate": 3.753720009644371e-07, |
| "loss": 0.037, |
| "step": 319 |
| }, |
| { |
| "epoch": 5.163043478260869, |
| "grad_norm": 0.24505485594272614, |
| "learning_rate": 3.5995953830675004e-07, |
| "loss": 0.043, |
| "step": 320 |
| }, |
| { |
| "epoch": 5.179347826086956, |
| "grad_norm": 0.2312752902507782, |
| "learning_rate": 3.448455818852267e-07, |
| "loss": 0.0369, |
| "step": 321 |
| }, |
| { |
| "epoch": 5.195652173913044, |
| "grad_norm": 0.21873357892036438, |
| "learning_rate": 3.3003223988909234e-07, |
| "loss": 0.0398, |
| "step": 322 |
| }, |
| { |
| "epoch": 5.211956521739131, |
| "grad_norm": 0.22271527349948883, |
| "learning_rate": 3.1552157857599327e-07, |
| "loss": 0.0377, |
| "step": 323 |
| }, |
| { |
| "epoch": 5.228260869565218, |
| "grad_norm": 0.22470292448997498, |
| "learning_rate": 3.0131562198377763e-07, |
| "loss": 0.0411, |
| "step": 324 |
| }, |
| { |
| "epoch": 5.244565217391305, |
| "grad_norm": 0.22748963534832, |
| "learning_rate": 2.874163516481732e-07, |
| "loss": 0.0346, |
| "step": 325 |
| }, |
| { |
| "epoch": 5.260869565217392, |
| "grad_norm": 0.27392780780792236, |
| "learning_rate": 2.7382570632638853e-07, |
| "loss": 0.0426, |
| "step": 326 |
| }, |
| { |
| "epoch": 5.2771739130434785, |
| "grad_norm": 0.2509923577308655, |
| "learning_rate": 2.605455817266861e-07, |
| "loss": 0.0375, |
| "step": 327 |
| }, |
| { |
| "epoch": 5.293478260869565, |
| "grad_norm": 0.20978674292564392, |
| "learning_rate": 2.4757783024395244e-07, |
| "loss": 0.0333, |
| "step": 328 |
| }, |
| { |
| "epoch": 5.309782608695652, |
| "grad_norm": 0.23130132257938385, |
| "learning_rate": 2.3492426070131746e-07, |
| "loss": 0.0429, |
| "step": 329 |
| }, |
| { |
| "epoch": 5.326086956521739, |
| "grad_norm": 0.2261745035648346, |
| "learning_rate": 2.2258663809784892e-07, |
| "loss": 0.0376, |
| "step": 330 |
| }, |
| { |
| "epoch": 5.342391304347826, |
| "grad_norm": 0.21033048629760742, |
| "learning_rate": 2.1056668336235624e-07, |
| "loss": 0.039, |
| "step": 331 |
| }, |
| { |
| "epoch": 5.358695652173913, |
| "grad_norm": 0.23950928449630737, |
| "learning_rate": 1.9886607311334987e-07, |
| "loss": 0.0375, |
| "step": 332 |
| }, |
| { |
| "epoch": 5.375, |
| "grad_norm": 0.2400600165128708, |
| "learning_rate": 1.8748643942516882e-07, |
| "loss": 0.0447, |
| "step": 333 |
| }, |
| { |
| "epoch": 5.391304347826087, |
| "grad_norm": 0.24937079846858978, |
| "learning_rate": 1.764293696003358e-07, |
| "loss": 0.0439, |
| "step": 334 |
| }, |
| { |
| "epoch": 5.407608695652174, |
| "grad_norm": 0.2523324489593506, |
| "learning_rate": 1.656964059481453e-07, |
| "loss": 0.0368, |
| "step": 335 |
| }, |
| { |
| "epoch": 5.423913043478261, |
| "grad_norm": 0.24901482462882996, |
| "learning_rate": 1.552890455695369e-07, |
| "loss": 0.0382, |
| "step": 336 |
| }, |
| { |
| "epoch": 5.440217391304348, |
| "grad_norm": 0.21542105078697205, |
| "learning_rate": 1.4520874014826464e-07, |
| "loss": 0.04, |
| "step": 337 |
| }, |
| { |
| "epoch": 5.456521739130435, |
| "grad_norm": 0.24464815855026245, |
| "learning_rate": 1.3545689574841341e-07, |
| "loss": 0.0358, |
| "step": 338 |
| }, |
| { |
| "epoch": 5.4728260869565215, |
| "grad_norm": 0.24528922140598297, |
| "learning_rate": 1.2603487261826726e-07, |
| "loss": 0.038, |
| "step": 339 |
| }, |
| { |
| "epoch": 5.489130434782608, |
| "grad_norm": 0.22918996214866638, |
| "learning_rate": 1.1694398500057714e-07, |
| "loss": 0.0421, |
| "step": 340 |
| }, |
| { |
| "epoch": 5.505434782608695, |
| "grad_norm": 0.2410709410905838, |
| "learning_rate": 1.081855009492383e-07, |
| "loss": 0.0395, |
| "step": 341 |
| }, |
| { |
| "epoch": 5.521739130434782, |
| "grad_norm": 0.22620242834091187, |
| "learning_rate": 9.976064215241859e-08, |
| "loss": 0.0392, |
| "step": 342 |
| }, |
| { |
| "epoch": 5.538043478260869, |
| "grad_norm": 0.24372537434101105, |
| "learning_rate": 9.167058376214621e-08, |
| "loss": 0.0379, |
| "step": 343 |
| }, |
| { |
| "epoch": 5.554347826086957, |
| "grad_norm": 0.21486566960811615, |
| "learning_rate": 8.391645423039357e-08, |
| "loss": 0.0377, |
| "step": 344 |
| }, |
| { |
| "epoch": 5.570652173913043, |
| "grad_norm": 0.22310517728328705, |
| "learning_rate": 7.649933515167407e-08, |
| "loss": 0.0386, |
| "step": 345 |
| }, |
| { |
| "epoch": 5.586956521739131, |
| "grad_norm": 0.23468376696109772, |
| "learning_rate": 6.94202611121736e-08, |
| "loss": 0.0398, |
| "step": 346 |
| }, |
| { |
| "epoch": 5.603260869565218, |
| "grad_norm": 0.24592317640781403, |
| "learning_rate": 6.268021954544095e-08, |
| "loss": 0.0346, |
| "step": 347 |
| }, |
| { |
| "epoch": 5.619565217391305, |
| "grad_norm": 0.2240305244922638, |
| "learning_rate": 5.628015059465364e-08, |
| "loss": 0.0483, |
| "step": 348 |
| }, |
| { |
| "epoch": 5.635869565217392, |
| "grad_norm": 0.2843106687068939, |
| "learning_rate": 5.022094698148072e-08, |
| "loss": 0.0427, |
| "step": 349 |
| }, |
| { |
| "epoch": 5.6521739130434785, |
| "grad_norm": 0.2484791874885559, |
| "learning_rate": 4.450345388156141e-08, |
| "loss": 0.0429, |
| "step": 350 |
| }, |
| { |
| "epoch": 5.668478260869565, |
| "grad_norm": 0.23303182423114777, |
| "learning_rate": 3.9128468806614304e-08, |
| "loss": 0.0355, |
| "step": 351 |
| }, |
| { |
| "epoch": 5.684782608695652, |
| "grad_norm": 0.22100433707237244, |
| "learning_rate": 3.4096741493194196e-08, |
| "loss": 0.0386, |
| "step": 352 |
| }, |
| { |
| "epoch": 5.701086956521739, |
| "grad_norm": 0.2393917292356491, |
| "learning_rate": 2.940897379811597e-08, |
| "loss": 0.0495, |
| "step": 353 |
| }, |
| { |
| "epoch": 5.717391304347826, |
| "grad_norm": 0.27596771717071533, |
| "learning_rate": 2.506581960055432e-08, |
| "loss": 0.0388, |
| "step": 354 |
| }, |
| { |
| "epoch": 5.733695652173913, |
| "grad_norm": 0.23731505870819092, |
| "learning_rate": 2.106788471083615e-08, |
| "loss": 0.039, |
| "step": 355 |
| }, |
| { |
| "epoch": 5.75, |
| "grad_norm": 0.23420853912830353, |
| "learning_rate": 1.7415726785939836e-08, |
| "loss": 0.0466, |
| "step": 356 |
| }, |
| { |
| "epoch": 5.766304347826087, |
| "grad_norm": 0.2571556866168976, |
| "learning_rate": 1.4109855251708272e-08, |
| "loss": 0.0398, |
| "step": 357 |
| }, |
| { |
| "epoch": 5.782608695652174, |
| "grad_norm": 0.25577929615974426, |
| "learning_rate": 1.115073123179128e-08, |
| "loss": 0.0418, |
| "step": 358 |
| }, |
| { |
| "epoch": 5.798913043478261, |
| "grad_norm": 0.25703752040863037, |
| "learning_rate": 8.538767483325384e-09, |
| "loss": 0.0382, |
| "step": 359 |
| }, |
| { |
| "epoch": 5.815217391304348, |
| "grad_norm": 0.21773234009742737, |
| "learning_rate": 6.274328339360702e-09, |
| "loss": 0.0374, |
| "step": 360 |
| }, |
| { |
| "epoch": 5.831521739130435, |
| "grad_norm": 0.24684062600135803, |
| "learning_rate": 4.357729658039378e-09, |
| "loss": 0.041, |
| "step": 361 |
| }, |
| { |
| "epoch": 5.8478260869565215, |
| "grad_norm": 0.20334966480731964, |
| "learning_rate": 2.789238778540537e-09, |
| "loss": 0.0386, |
| "step": 362 |
| }, |
| { |
| "epoch": 5.864130434782608, |
| "grad_norm": 0.2612401247024536, |
| "learning_rate": 1.5690744837873473e-09, |
| "loss": 0.0415, |
| "step": 363 |
| }, |
| { |
| "epoch": 5.880434782608695, |
| "grad_norm": 0.2394614964723587, |
| "learning_rate": 6.974069699314246e-10, |
| "loss": 0.0376, |
| "step": 364 |
| }, |
| { |
| "epoch": 5.896739130434782, |
| "grad_norm": 0.23148567974567413, |
| "learning_rate": 1.743578226129361e-10, |
| "loss": 0.0387, |
| "step": 365 |
| }, |
| { |
| "epoch": 5.913043478260869, |
| "grad_norm": 0.24650397896766663, |
| "learning_rate": 0.0, |
| "loss": 0.0396, |
| "step": 366 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 366, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 6, |
| "save_steps": 61, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 8.114918032020603e+17, |
| "train_batch_size": 4, |
| "trial_name": null, |
| "trial_params": null |
| } |