{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.95778364116095,
  "eval_steps": 500,
  "global_step": 756,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0079155672823219,
      "grad_norm": 34.582191467285156,
      "learning_rate": 5.0000000000000004e-08,
      "loss": 2.1743,
      "step": 1
    },
    {
      "epoch": 0.0158311345646438,
      "grad_norm": 35.2754020690918,
      "learning_rate": 1.0000000000000001e-07,
      "loss": 2.195,
      "step": 2
    },
    {
      "epoch": 0.023746701846965697,
      "grad_norm": 35.12391662597656,
      "learning_rate": 1.5000000000000002e-07,
      "loss": 2.1971,
      "step": 3
    },
    {
      "epoch": 0.0316622691292876,
      "grad_norm": 34.45759201049805,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 2.1701,
      "step": 4
    },
    {
      "epoch": 0.0395778364116095,
      "grad_norm": 35.35009002685547,
      "learning_rate": 2.5000000000000004e-07,
      "loss": 2.2099,
      "step": 5
    },
    {
      "epoch": 0.047493403693931395,
      "grad_norm": 35.039520263671875,
      "learning_rate": 3.0000000000000004e-07,
      "loss": 2.2169,
      "step": 6
    },
    {
      "epoch": 0.055408970976253295,
      "grad_norm": 35.103248596191406,
      "learning_rate": 3.5000000000000004e-07,
      "loss": 2.2121,
      "step": 7
    },
    {
      "epoch": 0.0633245382585752,
      "grad_norm": 34.65024185180664,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 2.1715,
      "step": 8
    },
    {
      "epoch": 0.0712401055408971,
      "grad_norm": 35.055023193359375,
      "learning_rate": 4.5000000000000003e-07,
      "loss": 2.1571,
      "step": 9
    },
    {
      "epoch": 0.079155672823219,
      "grad_norm": 34.82114028930664,
      "learning_rate": 5.000000000000001e-07,
      "loss": 2.1428,
      "step": 10
    },
    {
      "epoch": 0.0870712401055409,
      "grad_norm": 33.663883209228516,
      "learning_rate": 5.5e-07,
      "loss": 2.0656,
      "step": 11
    },
    {
      "epoch": 0.09498680738786279,
      "grad_norm": 33.614967346191406,
      "learning_rate": 6.000000000000001e-07,
      "loss": 2.05,
      "step": 12
    },
    {
      "epoch": 0.10290237467018469,
      "grad_norm": 32.66334915161133,
      "learning_rate": 6.5e-07,
      "loss": 1.9905,
      "step": 13
    },
    {
      "epoch": 0.11081794195250659,
      "grad_norm": 31.83934211730957,
      "learning_rate": 7.000000000000001e-07,
      "loss": 1.8981,
      "step": 14
    },
    {
      "epoch": 0.11873350923482849,
      "grad_norm": 32.067840576171875,
      "learning_rate": 7.5e-07,
      "loss": 1.8803,
      "step": 15
    },
    {
      "epoch": 0.1266490765171504,
      "grad_norm": 31.775592803955078,
      "learning_rate": 8.000000000000001e-07,
      "loss": 1.8215,
      "step": 16
    },
    {
      "epoch": 0.1345646437994723,
      "grad_norm": 30.205060958862305,
      "learning_rate": 8.500000000000001e-07,
      "loss": 1.6654,
      "step": 17
    },
    {
      "epoch": 0.1424802110817942,
      "grad_norm": 30.699304580688477,
      "learning_rate": 9.000000000000001e-07,
      "loss": 1.6149,
      "step": 18
    },
    {
      "epoch": 0.1503957783641161,
      "grad_norm": 30.16368293762207,
      "learning_rate": 9.500000000000001e-07,
      "loss": 1.4905,
      "step": 19
    },
    {
      "epoch": 0.158311345646438,
      "grad_norm": 30.227079391479492,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 1.3845,
      "step": 20
    },
    {
      "epoch": 0.1662269129287599,
      "grad_norm": 30.42757797241211,
      "learning_rate": 1.0500000000000001e-06,
      "loss": 1.2462,
      "step": 21
    },
    {
      "epoch": 0.1741424802110818,
      "grad_norm": 30.44344711303711,
      "learning_rate": 1.1e-06,
      "loss": 1.0948,
      "step": 22
    },
    {
      "epoch": 0.1820580474934037,
      "grad_norm": 30.735376358032227,
      "learning_rate": 1.1500000000000002e-06,
      "loss": 0.9618,
      "step": 23
    },
    {
      "epoch": 0.18997361477572558,
      "grad_norm": 30.048675537109375,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 0.7738,
      "step": 24
    },
    {
      "epoch": 0.19788918205804748,
      "grad_norm": 30.161460876464844,
      "learning_rate": 1.25e-06,
      "loss": 0.6424,
      "step": 25
    },
    {
      "epoch": 0.20580474934036938,
      "grad_norm": 28.648540496826172,
      "learning_rate": 1.3e-06,
      "loss": 0.4602,
      "step": 26
    },
    {
      "epoch": 0.21372031662269128,
      "grad_norm": 25.193084716796875,
      "learning_rate": 1.3500000000000002e-06,
      "loss": 0.3309,
      "step": 27
    },
    {
      "epoch": 0.22163588390501318,
      "grad_norm": 18.437116622924805,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 0.2206,
      "step": 28
    },
    {
      "epoch": 0.22955145118733508,
      "grad_norm": 12.642715454101562,
      "learning_rate": 1.45e-06,
      "loss": 0.139,
      "step": 29
    },
    {
      "epoch": 0.23746701846965698,
      "grad_norm": 6.1605305671691895,
      "learning_rate": 1.5e-06,
      "loss": 0.0982,
      "step": 30
    },
    {
      "epoch": 0.24538258575197888,
      "grad_norm": 2.986103057861328,
      "learning_rate": 1.5500000000000002e-06,
      "loss": 0.0707,
      "step": 31
    },
    {
      "epoch": 0.2532981530343008,
      "grad_norm": 2.030493974685669,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 0.0637,
      "step": 32
    },
    {
      "epoch": 0.2612137203166227,
      "grad_norm": 1.5127642154693604,
      "learning_rate": 1.6500000000000003e-06,
      "loss": 0.0616,
      "step": 33
    },
    {
      "epoch": 0.2691292875989446,
      "grad_norm": 1.8087722063064575,
      "learning_rate": 1.7000000000000002e-06,
      "loss": 0.0555,
      "step": 34
    },
    {
      "epoch": 0.2770448548812665,
      "grad_norm": 1.3533340692520142,
      "learning_rate": 1.75e-06,
      "loss": 0.0517,
      "step": 35
    },
    {
      "epoch": 0.2849604221635884,
      "grad_norm": 1.3595997095108032,
      "learning_rate": 1.8000000000000001e-06,
      "loss": 0.0537,
      "step": 36
    },
    {
      "epoch": 0.2928759894459103,
      "grad_norm": 1.2778421640396118,
      "learning_rate": 1.85e-06,
      "loss": 0.0483,
      "step": 37
    },
    {
      "epoch": 0.3007915567282322,
      "grad_norm": 1.0834269523620605,
      "learning_rate": 1.9000000000000002e-06,
      "loss": 0.0471,
      "step": 38
    },
    {
      "epoch": 0.3087071240105541,
      "grad_norm": 1.175179362297058,
      "learning_rate": 1.9500000000000004e-06,
      "loss": 0.0528,
      "step": 39
    },
    {
      "epoch": 0.316622691292876,
      "grad_norm": 0.9915143847465515,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.0525,
      "step": 40
    },
    {
      "epoch": 0.3245382585751979,
      "grad_norm": 0.7996736168861389,
      "learning_rate": 2.05e-06,
      "loss": 0.0479,
      "step": 41
    },
    {
      "epoch": 0.3324538258575198,
      "grad_norm": 0.7375659942626953,
      "learning_rate": 2.1000000000000002e-06,
      "loss": 0.0509,
      "step": 42
    },
    {
      "epoch": 0.3403693931398417,
      "grad_norm": 0.7712483406066895,
      "learning_rate": 2.15e-06,
      "loss": 0.0484,
      "step": 43
    },
    {
      "epoch": 0.3482849604221636,
      "grad_norm": 0.9321597218513489,
      "learning_rate": 2.2e-06,
      "loss": 0.0448,
      "step": 44
    },
    {
      "epoch": 0.3562005277044855,
      "grad_norm": 1.02247953414917,
      "learning_rate": 2.25e-06,
      "loss": 0.0484,
      "step": 45
    },
    {
      "epoch": 0.3641160949868074,
      "grad_norm": 0.857718825340271,
      "learning_rate": 2.3000000000000004e-06,
      "loss": 0.0425,
      "step": 46
    },
    {
      "epoch": 0.3720316622691293,
      "grad_norm": 1.0721040964126587,
      "learning_rate": 2.35e-06,
      "loss": 0.0472,
      "step": 47
    },
    {
      "epoch": 0.37994722955145116,
      "grad_norm": 0.811705470085144,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 0.039,
      "step": 48
    },
    {
      "epoch": 0.38786279683377306,
      "grad_norm": 0.9680790901184082,
      "learning_rate": 2.4500000000000003e-06,
      "loss": 0.0423,
      "step": 49
    },
    {
      "epoch": 0.39577836411609496,
      "grad_norm": 0.9470660090446472,
      "learning_rate": 2.5e-06,
      "loss": 0.0411,
      "step": 50
    },
    {
      "epoch": 0.40369393139841686,
      "grad_norm": 0.8540447950363159,
      "learning_rate": 2.55e-06,
      "loss": 0.049,
      "step": 51
    },
    {
      "epoch": 0.41160949868073876,
      "grad_norm": 1.39769446849823,
      "learning_rate": 2.6e-06,
      "loss": 0.0478,
      "step": 52
    },
    {
      "epoch": 0.41952506596306066,
      "grad_norm": 0.8914775848388672,
      "learning_rate": 2.6500000000000005e-06,
      "loss": 0.0415,
      "step": 53
    },
    {
      "epoch": 0.42744063324538256,
      "grad_norm": 0.759353518486023,
      "learning_rate": 2.7000000000000004e-06,
      "loss": 0.046,
      "step": 54
    },
    {
      "epoch": 0.43535620052770446,
      "grad_norm": 0.8632487058639526,
      "learning_rate": 2.7500000000000004e-06,
      "loss": 0.0435,
      "step": 55
    },
    {
      "epoch": 0.44327176781002636,
      "grad_norm": 0.6662724018096924,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 0.0496,
      "step": 56
    },
    {
      "epoch": 0.45118733509234826,
      "grad_norm": 0.8853585124015808,
      "learning_rate": 2.85e-06,
      "loss": 0.0384,
      "step": 57
    },
    {
      "epoch": 0.45910290237467016,
      "grad_norm": 0.7003194093704224,
      "learning_rate": 2.9e-06,
      "loss": 0.0446,
      "step": 58
    },
    {
      "epoch": 0.46701846965699206,
      "grad_norm": 0.8177786469459534,
      "learning_rate": 2.95e-06,
      "loss": 0.0417,
      "step": 59
    },
    {
      "epoch": 0.47493403693931396,
      "grad_norm": 0.9565925598144531,
      "learning_rate": 3e-06,
      "loss": 0.044,
      "step": 60
    },
    {
      "epoch": 0.48284960422163586,
      "grad_norm": 0.7401254177093506,
      "learning_rate": 3.05e-06,
      "loss": 0.0439,
      "step": 61
    },
    {
      "epoch": 0.49076517150395776,
      "grad_norm": 0.9952861666679382,
      "learning_rate": 3.1000000000000004e-06,
      "loss": 0.0371,
      "step": 62
    },
    {
      "epoch": 0.49868073878627966,
      "grad_norm": 0.9622153639793396,
      "learning_rate": 3.1500000000000003e-06,
      "loss": 0.0457,
      "step": 63
    },
    {
      "epoch": 0.5065963060686016,
      "grad_norm": 0.7078511118888855,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 0.0396,
      "step": 64
    },
    {
      "epoch": 0.5145118733509235,
      "grad_norm": 0.9370516538619995,
      "learning_rate": 3.2500000000000002e-06,
      "loss": 0.0421,
      "step": 65
    },
    {
      "epoch": 0.5224274406332454,
      "grad_norm": 0.6465123891830444,
      "learning_rate": 3.3000000000000006e-06,
      "loss": 0.0341,
      "step": 66
    },
    {
      "epoch": 0.5303430079155673,
      "grad_norm": 0.7659086585044861,
      "learning_rate": 3.3500000000000005e-06,
      "loss": 0.0429,
      "step": 67
    },
    {
      "epoch": 0.5382585751978892,
      "grad_norm": 0.7772212028503418,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 0.036,
      "step": 68
    },
    {
      "epoch": 0.5461741424802111,
      "grad_norm": 0.7115844488143921,
      "learning_rate": 3.45e-06,
      "loss": 0.0327,
      "step": 69
    },
    {
      "epoch": 0.554089709762533,
      "grad_norm": 0.6542766690254211,
      "learning_rate": 3.5e-06,
      "loss": 0.0362,
      "step": 70
    },
    {
      "epoch": 0.5620052770448549,
      "grad_norm": 0.7394366264343262,
      "learning_rate": 3.5500000000000003e-06,
      "loss": 0.0426,
      "step": 71
    },
    {
      "epoch": 0.5699208443271768,
      "grad_norm": 0.8163363337516785,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 0.0344,
      "step": 72
    },
    {
      "epoch": 0.5778364116094987,
      "grad_norm": 0.7115806937217712,
      "learning_rate": 3.65e-06,
      "loss": 0.0379,
      "step": 73
    },
    {
      "epoch": 0.5857519788918206,
      "grad_norm": 0.6192121505737305,
      "learning_rate": 3.7e-06,
      "loss": 0.0276,
      "step": 74
    },
    {
      "epoch": 0.5936675461741425,
      "grad_norm": 0.822684645652771,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 0.0308,
      "step": 75
    },
    {
      "epoch": 0.6015831134564644,
      "grad_norm": 0.8095663189888,
      "learning_rate": 3.8000000000000005e-06,
      "loss": 0.0303,
      "step": 76
    },
    {
      "epoch": 0.6094986807387863,
      "grad_norm": 0.7700952887535095,
      "learning_rate": 3.85e-06,
      "loss": 0.0328,
      "step": 77
    },
    {
      "epoch": 0.6174142480211082,
      "grad_norm": 0.7777529358863831,
      "learning_rate": 3.900000000000001e-06,
      "loss": 0.0344,
      "step": 78
    },
    {
      "epoch": 0.6253298153034301,
      "grad_norm": 0.826386034488678,
      "learning_rate": 3.95e-06,
      "loss": 0.033,
      "step": 79
    },
    {
      "epoch": 0.633245382585752,
      "grad_norm": 0.788942277431488,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.0331,
      "step": 80
    },
    {
      "epoch": 0.6411609498680739,
      "grad_norm": 0.7351260185241699,
      "learning_rate": 4.05e-06,
      "loss": 0.0346,
      "step": 81
    },
    {
      "epoch": 0.6490765171503958,
      "grad_norm": 0.5737812519073486,
      "learning_rate": 4.1e-06,
      "loss": 0.027,
      "step": 82
    },
    {
      "epoch": 0.6569920844327177,
      "grad_norm": 0.7053394913673401,
      "learning_rate": 4.15e-06,
      "loss": 0.034,
      "step": 83
    },
    {
      "epoch": 0.6649076517150396,
      "grad_norm": 0.7508617639541626,
      "learning_rate": 4.2000000000000004e-06,
      "loss": 0.0323,
      "step": 84
    },
    {
      "epoch": 0.6728232189973615,
      "grad_norm": 1.0235975980758667,
      "learning_rate": 4.25e-06,
      "loss": 0.0395,
      "step": 85
    },
    {
      "epoch": 0.6807387862796834,
      "grad_norm": 0.6522803902626038,
      "learning_rate": 4.3e-06,
      "loss": 0.0262,
      "step": 86
    },
    {
      "epoch": 0.6886543535620053,
      "grad_norm": 1.182160496711731,
      "learning_rate": 4.350000000000001e-06,
      "loss": 0.0387,
      "step": 87
    },
    {
      "epoch": 0.6965699208443272,
      "grad_norm": 0.6571248769760132,
      "learning_rate": 4.4e-06,
      "loss": 0.0262,
      "step": 88
    },
    {
      "epoch": 0.7044854881266491,
      "grad_norm": 0.6641517877578735,
      "learning_rate": 4.450000000000001e-06,
      "loss": 0.0265,
      "step": 89
    },
    {
      "epoch": 0.712401055408971,
      "grad_norm": 0.8751306533813477,
      "learning_rate": 4.5e-06,
      "loss": 0.0307,
      "step": 90
    },
    {
      "epoch": 0.7203166226912929,
      "grad_norm": 0.7627044320106506,
      "learning_rate": 4.5500000000000005e-06,
      "loss": 0.0332,
      "step": 91
    },
    {
      "epoch": 0.7282321899736148,
      "grad_norm": 0.7124688625335693,
      "learning_rate": 4.600000000000001e-06,
      "loss": 0.0299,
      "step": 92
    },
    {
      "epoch": 0.7361477572559367,
      "grad_norm": 0.7688237428665161,
      "learning_rate": 4.65e-06,
      "loss": 0.0258,
      "step": 93
    },
    {
      "epoch": 0.7440633245382586,
      "grad_norm": 0.8663469552993774,
      "learning_rate": 4.7e-06,
      "loss": 0.0342,
      "step": 94
    },
    {
      "epoch": 0.7519788918205804,
      "grad_norm": 0.6536213755607605,
      "learning_rate": 4.75e-06,
      "loss": 0.0226,
      "step": 95
    },
    {
      "epoch": 0.7598944591029023,
      "grad_norm": 0.5616201758384705,
      "learning_rate": 4.800000000000001e-06,
      "loss": 0.022,
      "step": 96
    },
    {
      "epoch": 0.7678100263852242,
      "grad_norm": 0.7985079884529114,
      "learning_rate": 4.85e-06,
      "loss": 0.0305,
      "step": 97
    },
    {
      "epoch": 0.7757255936675461,
      "grad_norm": 0.726844847202301,
      "learning_rate": 4.9000000000000005e-06,
      "loss": 0.0243,
      "step": 98
    },
    {
      "epoch": 0.783641160949868,
      "grad_norm": 0.656874418258667,
      "learning_rate": 4.95e-06,
      "loss": 0.0243,
      "step": 99
    },
    {
      "epoch": 0.7915567282321899,
      "grad_norm": 0.8744286894798279,
      "learning_rate": 5e-06,
      "loss": 0.0261,
      "step": 100
    },
    {
      "epoch": 0.7994722955145118,
      "grad_norm": 1.0138187408447266,
      "learning_rate": 4.9999713317456065e-06,
      "loss": 0.0254,
      "step": 101
    },
    {
      "epoch": 0.8073878627968337,
      "grad_norm": 0.6964263319969177,
      "learning_rate": 4.9998853276399215e-06,
      "loss": 0.0263,
      "step": 102
    },
    {
      "epoch": 0.8153034300791556,
      "grad_norm": 0.6374551057815552,
      "learning_rate": 4.999741989655415e-06,
      "loss": 0.0228,
      "step": 103
    },
    {
      "epoch": 0.8232189973614775,
      "grad_norm": 0.6274930834770203,
      "learning_rate": 4.999541321079486e-06,
      "loss": 0.0286,
      "step": 104
    },
    {
      "epoch": 0.8311345646437994,
      "grad_norm": 0.6344143748283386,
      "learning_rate": 4.99928332651439e-06,
      "loss": 0.0258,
      "step": 105
    },
    {
      "epoch": 0.8390501319261213,
      "grad_norm": 0.8304360508918762,
      "learning_rate": 4.998968011877129e-06,
      "loss": 0.022,
      "step": 106
    },
    {
      "epoch": 0.8469656992084432,
      "grad_norm": 0.6463897228240967,
      "learning_rate": 4.998595384399319e-06,
      "loss": 0.0184,
      "step": 107
    },
    {
      "epoch": 0.8548812664907651,
      "grad_norm": 0.5647016763687134,
      "learning_rate": 4.998165452627025e-06,
      "loss": 0.0221,
      "step": 108
    },
    {
      "epoch": 0.862796833773087,
      "grad_norm": 0.5621638298034668,
      "learning_rate": 4.997678226420561e-06,
      "loss": 0.022,
      "step": 109
    },
    {
      "epoch": 0.8707124010554089,
      "grad_norm": 0.8295672535896301,
      "learning_rate": 4.997133716954266e-06,
      "loss": 0.0206,
      "step": 110
    },
    {
      "epoch": 0.8786279683377308,
      "grad_norm": 0.8057272434234619,
      "learning_rate": 4.99653193671625e-06,
      "loss": 0.0204,
      "step": 111
    },
    {
      "epoch": 0.8865435356200527,
      "grad_norm": 0.6956275105476379,
      "learning_rate": 4.995872899508103e-06,
      "loss": 0.0186,
      "step": 112
    },
    {
      "epoch": 0.8944591029023746,
      "grad_norm": 0.7873161435127258,
      "learning_rate": 4.995156620444584e-06,
      "loss": 0.0149,
      "step": 113
    },
    {
      "epoch": 0.9023746701846965,
      "grad_norm": 0.9187519550323486,
      "learning_rate": 4.994383115953266e-06,
      "loss": 0.0183,
      "step": 114
    },
    {
      "epoch": 0.9102902374670184,
      "grad_norm": 0.5987403392791748,
      "learning_rate": 4.9935524037741705e-06,
      "loss": 0.0158,
      "step": 115
    },
    {
      "epoch": 0.9182058047493403,
      "grad_norm": 0.6011790037155151,
      "learning_rate": 4.992664502959351e-06,
      "loss": 0.016,
      "step": 116
    },
    {
      "epoch": 0.9261213720316622,
      "grad_norm": 0.8429304957389832,
      "learning_rate": 4.991719433872461e-06,
      "loss": 0.0107,
      "step": 117
    },
    {
      "epoch": 0.9340369393139841,
      "grad_norm": 0.8618797659873962,
      "learning_rate": 4.990717218188286e-06,
      "loss": 0.0162,
      "step": 118
    },
    {
      "epoch": 0.941952506596306,
      "grad_norm": 1.1907824277877808,
      "learning_rate": 4.989657878892245e-06,
      "loss": 0.0173,
      "step": 119
    },
    {
      "epoch": 0.9498680738786279,
      "grad_norm": 0.7317540645599365,
      "learning_rate": 4.988541440279862e-06,
      "loss": 0.0183,
      "step": 120
    },
    {
      "epoch": 0.9577836411609498,
      "grad_norm": 0.6827302575111389,
      "learning_rate": 4.987367927956218e-06,
      "loss": 0.0192,
      "step": 121
    },
    {
      "epoch": 0.9656992084432717,
      "grad_norm": 0.6512569189071655,
      "learning_rate": 4.986137368835351e-06,
      "loss": 0.0169,
      "step": 122
    },
    {
      "epoch": 0.9736147757255936,
      "grad_norm": 0.5439648032188416,
      "learning_rate": 4.984849791139647e-06,
      "loss": 0.0136,
      "step": 123
    },
    {
      "epoch": 0.9815303430079155,
      "grad_norm": 0.5652760863304138,
      "learning_rate": 4.983505224399188e-06,
      "loss": 0.0171,
      "step": 124
    },
    {
      "epoch": 0.9894459102902374,
      "grad_norm": 0.6217207908630371,
      "learning_rate": 4.9821036994510816e-06,
      "loss": 0.0175,
      "step": 125
    },
    {
      "epoch": 0.9973614775725593,
      "grad_norm": 0.5073563456535339,
      "learning_rate": 4.980645248438746e-06,
      "loss": 0.0166,
      "step": 126
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.5073563456535339,
      "learning_rate": 4.979129904811177e-06,
      "loss": 0.0103,
      "step": 127
    },
    {
      "epoch": 1.007915567282322,
      "grad_norm": 0.9930127263069153,
      "learning_rate": 4.977557703322178e-06,
      "loss": 0.0116,
      "step": 128
    },
    {
      "epoch": 1.0158311345646438,
      "grad_norm": 0.4781687259674072,
      "learning_rate": 4.975928680029571e-06,
      "loss": 0.0097,
      "step": 129
    },
    {
      "epoch": 1.0237467018469657,
      "grad_norm": 0.5260384678840637,
      "learning_rate": 4.974242872294354e-06,
      "loss": 0.0131,
      "step": 130
    },
    {
      "epoch": 1.0316622691292876,
      "grad_norm": 0.6748254895210266,
      "learning_rate": 4.972500318779864e-06,
      "loss": 0.0093,
      "step": 131
    },
    {
      "epoch": 1.0395778364116095,
      "grad_norm": 0.42848217487335205,
      "learning_rate": 4.970701059450872e-06,
      "loss": 0.0086,
      "step": 132
    },
    {
      "epoch": 1.0474934036939314,
      "grad_norm": 0.8815901279449463,
      "learning_rate": 4.968845135572678e-06,
      "loss": 0.0103,
      "step": 133
    },
    {
      "epoch": 1.0554089709762533,
      "grad_norm": 0.5253162980079651,
      "learning_rate": 4.96693258971016e-06,
      "loss": 0.0088,
      "step": 134
    },
    {
      "epoch": 1.0633245382585752,
      "grad_norm": 0.6259243488311768,
      "learning_rate": 4.9649634657268e-06,
      "loss": 0.0074,
      "step": 135
    },
    {
      "epoch": 1.071240105540897,
      "grad_norm": 1.0937525033950806,
      "learning_rate": 4.962937808783675e-06,
      "loss": 0.0115,
      "step": 136
    },
    {
      "epoch": 1.079155672823219,
      "grad_norm": 1.0612727403640747,
      "learning_rate": 4.960855665338424e-06,
      "loss": 0.009,
      "step": 137
    },
    {
      "epoch": 1.087071240105541,
      "grad_norm": 0.9188615679740906,
      "learning_rate": 4.958717083144182e-06,
      "loss": 0.0127,
      "step": 138
    },
    {
      "epoch": 1.0949868073878628,
      "grad_norm": 0.6099897623062134,
      "learning_rate": 4.956522111248483e-06,
      "loss": 0.0088,
      "step": 139
    },
    {
      "epoch": 1.1029023746701847,
      "grad_norm": 0.6967251896858215,
      "learning_rate": 4.954270799992138e-06,
      "loss": 0.0088,
      "step": 140
    },
    {
      "epoch": 1.1108179419525066,
      "grad_norm": 0.6113755702972412,
      "learning_rate": 4.9519632010080765e-06,
      "loss": 0.0079,
      "step": 141
    },
    {
      "epoch": 1.1187335092348285,
      "grad_norm": 0.6708074808120728,
      "learning_rate": 4.9495993672201675e-06,
      "loss": 0.0093,
      "step": 142
    },
    {
      "epoch": 1.1266490765171504,
      "grad_norm": 0.425584614276886,
      "learning_rate": 4.947179352842001e-06,
      "loss": 0.0093,
      "step": 143
    },
    {
      "epoch": 1.1345646437994723,
      "grad_norm": 0.3952641785144806,
      "learning_rate": 4.944703213375648e-06,
      "loss": 0.0057,
      "step": 144
    },
    {
      "epoch": 1.1424802110817942,
      "grad_norm": 0.39029237627983093,
      "learning_rate": 4.942171005610385e-06,
      "loss": 0.0106,
      "step": 145
    },
    {
      "epoch": 1.150395778364116,
      "grad_norm": 0.5346894264221191,
      "learning_rate": 4.939582787621394e-06,
      "loss": 0.0062,
      "step": 146
    },
    {
      "epoch": 1.158311345646438,
      "grad_norm": 0.48500752449035645,
      "learning_rate": 4.936938618768427e-06,
      "loss": 0.0077,
      "step": 147
    },
    {
      "epoch": 1.16622691292876,
      "grad_norm": 0.5172581076622009,
      "learning_rate": 4.934238559694448e-06,
      "loss": 0.0122,
      "step": 148
    },
    {
      "epoch": 1.1741424802110818,
      "grad_norm": 0.571660578250885,
      "learning_rate": 4.9314826723242425e-06,
      "loss": 0.0107,
      "step": 149
    },
    {
      "epoch": 1.1820580474934037,
      "grad_norm": 0.558224081993103,
      "learning_rate": 4.928671019862995e-06,
      "loss": 0.0062,
      "step": 150
    },
    {
      "epoch": 1.1899736147757256,
      "grad_norm": 0.46116045117378235,
      "learning_rate": 4.925803666794839e-06,
      "loss": 0.0089,
      "step": 151
    },
    {
      "epoch": 1.1978891820580475,
      "grad_norm": 0.5193414092063904,
      "learning_rate": 4.92288067888138e-06,
      "loss": 0.0084,
      "step": 152
    },
    {
      "epoch": 1.2058047493403694,
      "grad_norm": 0.6195410490036011,
      "learning_rate": 4.919902123160187e-06,
      "loss": 0.0115,
      "step": 153
    },
    {
      "epoch": 1.2137203166226913,
      "grad_norm": 0.5878890752792358,
      "learning_rate": 4.9168680679432565e-06,
      "loss": 0.0075,
      "step": 154
    },
    {
      "epoch": 1.2216358839050132,
      "grad_norm": 0.3049341142177582,
      "learning_rate": 4.913778582815439e-06,
      "loss": 0.0071,
      "step": 155
    },
    {
      "epoch": 1.229551451187335,
      "grad_norm": 0.9924205541610718,
      "learning_rate": 4.9106337386328524e-06,
      "loss": 0.0121,
      "step": 156
    },
    {
      "epoch": 1.237467018469657,
      "grad_norm": 0.44824329018592834,
      "learning_rate": 4.907433607521252e-06,
      "loss": 0.0066,
      "step": 157
    },
    {
      "epoch": 1.245382585751979,
      "grad_norm": 0.7608261108398438,
      "learning_rate": 4.904178262874374e-06,
      "loss": 0.0046,
      "step": 158
    },
    {
      "epoch": 1.2532981530343008,
      "grad_norm": 0.4237998425960541,
      "learning_rate": 4.900867779352258e-06,
      "loss": 0.0032,
      "step": 159
    },
    {
      "epoch": 1.2612137203166227,
      "grad_norm": 0.9337185025215149,
      "learning_rate": 4.897502232879533e-06,
      "loss": 0.0061,
      "step": 160
    },
    {
      "epoch": 1.2691292875989446,
      "grad_norm": 0.6228697299957275,
      "learning_rate": 4.89408170064367e-06,
      "loss": 0.0057,
      "step": 161
    },
    {
      "epoch": 1.2770448548812665,
      "grad_norm": 0.41712895035743713,
      "learning_rate": 4.890606261093221e-06,
      "loss": 0.0046,
      "step": 162
    },
    {
      "epoch": 1.2849604221635884,
      "grad_norm": 0.6179379820823669,
      "learning_rate": 4.887075993936014e-06,
      "loss": 0.0045,
      "step": 163
    },
    {
      "epoch": 1.2928759894459103,
      "grad_norm": 0.3739337921142578,
      "learning_rate": 4.883490980137327e-06,
      "loss": 0.0061,
      "step": 164
    },
    {
      "epoch": 1.3007915567282322,
      "grad_norm": 0.5171535015106201,
      "learning_rate": 4.8798513019180295e-06,
      "loss": 0.0073,
      "step": 165
    },
    {
      "epoch": 1.3087071240105541,
      "grad_norm": 0.640513002872467,
      "learning_rate": 4.876157042752698e-06,
      "loss": 0.0043,
      "step": 166
    },
    {
      "epoch": 1.316622691292876,
      "grad_norm": 0.4904448390007019,
      "learning_rate": 4.872408287367702e-06,
      "loss": 0.0044,
      "step": 167
    },
    {
      "epoch": 1.324538258575198,
      "grad_norm": 0.4022831618785858,
      "learning_rate": 4.868605121739261e-06,
      "loss": 0.0019,
      "step": 168
    },
    {
      "epoch": 1.3324538258575198,
      "grad_norm": 0.35701292753219604,
      "learning_rate": 4.86474763309147e-06,
      "loss": 0.0048,
      "step": 169
    },
    {
      "epoch": 1.3403693931398417,
      "grad_norm": 1.2709388732910156,
      "learning_rate": 4.8608359098943014e-06,
      "loss": 0.0073,
      "step": 170
    },
    {
      "epoch": 1.3482849604221636,
      "grad_norm": 1.1490532159805298,
      "learning_rate": 4.856870041861576e-06,
      "loss": 0.0082,
      "step": 171
    },
    {
      "epoch": 1.3562005277044855,
      "grad_norm": 1.9450149536132812,
      "learning_rate": 4.8528501199489045e-06,
      "loss": 0.0047,
      "step": 172
    },
    {
      "epoch": 1.3641160949868074,
      "grad_norm": 0.2517525553703308,
      "learning_rate": 4.848776236351602e-06,
      "loss": 0.0101,
      "step": 173
    },
    {
      "epoch": 1.3720316622691293,
      "grad_norm": 0.612678050994873,
      "learning_rate": 4.8446484845025754e-06,
      "loss": 0.0064,
      "step": 174
    },
    {
      "epoch": 1.3799472295514512,
      "grad_norm": 0.8020515441894531,
      "learning_rate": 4.840466959070174e-06,
      "loss": 0.0077,
      "step": 175
    },
    {
      "epoch": 1.3878627968337731,
      "grad_norm": 0.46455827355384827,
      "learning_rate": 4.836231755956028e-06,
      "loss": 0.0034,
      "step": 176
    },
    {
      "epoch": 1.395778364116095,
      "grad_norm": 0.4173589050769806,
      "learning_rate": 4.83194297229284e-06,
      "loss": 0.0065,
      "step": 177
    },
    {
      "epoch": 1.403693931398417,
      "grad_norm": 0.37849071621894836,
      "learning_rate": 4.827600706442164e-06,
      "loss": 0.0062,
      "step": 178
    },
    {
      "epoch": 1.4116094986807388,
      "grad_norm": 0.6507642269134521,
      "learning_rate": 4.823205057992145e-06,
      "loss": 0.0054,
      "step": 179
    },
    {
      "epoch": 1.4195250659630607,
      "grad_norm": 0.41411933302879333,
      "learning_rate": 4.8187561277552376e-06,
      "loss": 0.0056,
      "step": 180
    },
    {
      "epoch": 1.4274406332453826,
      "grad_norm": 0.2742733359336853,
      "learning_rate": 4.8142540177658925e-06,
      "loss": 0.0022,
      "step": 181
    },
    {
      "epoch": 1.4353562005277045,
      "grad_norm": 0.4650178849697113,
      "learning_rate": 4.809698831278217e-06,
      "loss": 0.0054,
      "step": 182
    },
    {
      "epoch": 1.4432717678100264,
      "grad_norm": 0.2831101715564728,
      "learning_rate": 4.805090672763609e-06,
      "loss": 0.0041,
      "step": 183
    },
    {
      "epoch": 1.4511873350923483,
      "grad_norm": 0.4332253634929657,
      "learning_rate": 4.800429647908354e-06,
      "loss": 0.0024,
      "step": 184
    },
    {
      "epoch": 1.4591029023746702,
      "grad_norm": 0.13565762341022491,
      "learning_rate": 4.795715863611212e-06,
      "loss": 0.0025,
      "step": 185
    },
    {
      "epoch": 1.4670184696569921,
      "grad_norm": 0.36338773369789124,
      "learning_rate": 4.790949427980956e-06,
      "loss": 0.0006,
      "step": 186
    },
    {
      "epoch": 1.474934036939314,
      "grad_norm": 0.6935271620750427,
      "learning_rate": 4.786130450333897e-06,
      "loss": 0.0034,
      "step": 187
    },
    {
      "epoch": 1.482849604221636,
      "grad_norm": 0.7220961451530457,
      "learning_rate": 4.7812590411913755e-06,
      "loss": 0.0041,
      "step": 188
    },
    {
      "epoch": 1.4907651715039578,
      "grad_norm": 0.26949548721313477,
      "learning_rate": 4.77633531227723e-06,
      "loss": 0.0025,
      "step": 189
    },
    {
      "epoch": 1.4986807387862797,
      "grad_norm": 0.6303775906562805,
      "learning_rate": 4.771359376515231e-06,
      "loss": 0.005,
      "step": 190
    },
    {
      "epoch": 1.5065963060686016,
      "grad_norm": 0.7352619171142578,
      "learning_rate": 4.766331348026493e-06,
      "loss": 0.0061,
      "step": 191
    },
    {
      "epoch": 1.5145118733509235,
      "grad_norm": 0.6918866038322449,
      "learning_rate": 4.7612513421268546e-06,
      "loss": 0.0056,
      "step": 192
    },
    {
      "epoch": 1.5224274406332454,
      "grad_norm": 1.1295803785324097,
      "learning_rate": 4.756119475324237e-06,
      "loss": 0.0042,
      "step": 193
    },
    {
      "epoch": 1.5303430079155673,
      "grad_norm": 0.4663296043872833,
      "learning_rate": 4.750935865315972e-06,
      "loss": 0.0026,
      "step": 194
    },
    {
      "epoch": 1.5382585751978892,
      "grad_norm": 0.19712182879447937,
      "learning_rate": 4.745700630986097e-06,
      "loss": 0.0012,
      "step": 195
    },
    {
      "epoch": 1.5461741424802111,
      "grad_norm": 0.322085440158844,
      "learning_rate": 4.740413892402639e-06,
      "loss": 0.0053,
      "step": 196
    },
    {
      "epoch": 1.554089709762533,
      "grad_norm": 0.4675672650337219,
      "learning_rate": 4.73507577081485e-06,
      "loss": 0.0043,
      "step": 197
    },
    {
      "epoch": 1.562005277044855,
      "grad_norm": 0.5347087979316711,
      "learning_rate": 4.7296863886504315e-06,
      "loss": 0.0063,
      "step": 198
    },
    {
      "epoch": 1.5699208443271768,
      "grad_norm": 0.48838165402412415,
      "learning_rate": 4.7242458695127275e-06,
      "loss": 0.0029,
      "step": 199
    },
    {
      "epoch": 1.5778364116094987,
      "grad_norm": 0.42908889055252075,
      "learning_rate": 4.718754338177887e-06,
      "loss": 0.0049,
      "step": 200
    },
    {
      "epoch": 1.5857519788918206,
      "grad_norm": 0.2672640085220337,
      "learning_rate": 4.713211920592003e-06,
      "loss": 0.0025,
      "step": 201
    },
    {
      "epoch": 1.5936675461741425,
      "grad_norm": 0.7370240092277527,
      "learning_rate": 4.707618743868226e-06,
      "loss": 0.0033,
      "step": 202
    },
    {
      "epoch": 1.6015831134564644,
      "grad_norm": 0.3037024736404419,
      "learning_rate": 4.701974936283848e-06,
      "loss": 0.0028,
      "step": 203
    },
    {
      "epoch": 1.6094986807387863,
      "grad_norm": 0.3099062442779541,
      "learning_rate": 4.696280627277356e-06,
      "loss": 0.0026,
      "step": 204
    },
    {
      "epoch": 1.6174142480211082,
      "grad_norm": 0.16460958123207092,
      "learning_rate": 4.690535947445471e-06,
      "loss": 0.006,
      "step": 205
    },
    {
      "epoch": 1.6253298153034301,
      "grad_norm": 0.43918171525001526,
      "learning_rate": 4.6847410285401465e-06,
      "loss": 0.0037,
      "step": 206
    },
    {
      "epoch": 1.633245382585752,
      "grad_norm": 0.2510988414287567,
      "learning_rate": 4.67889600346555e-06,
      "loss": 0.0014,
      "step": 207
    },
    {
      "epoch": 1.641160949868074,
      "grad_norm": 0.27360427379608154,
      "learning_rate": 4.673001006275013e-06,
      "loss": 0.0019,
      "step": 208
    },
    {
      "epoch": 1.6490765171503958,
      "grad_norm": 0.16950200498104095,
      "learning_rate": 4.667056172167962e-06,
      "loss": 0.001,
      "step": 209
    },
    {
      "epoch": 1.6569920844327177,
      "grad_norm": 0.27843964099884033,
      "learning_rate": 4.6610616374868066e-06,
      "loss": 0.0037,
      "step": 210
    },
    {
      "epoch": 1.6649076517150396,
      "grad_norm": 0.5085106492042542,
      "learning_rate": 4.655017539713826e-06,
      "loss": 0.0028,
      "step": 211
    },
    {
      "epoch": 1.6728232189973615,
      "grad_norm": 0.28183189034461975,
      "learning_rate": 4.648924017468003e-06,
      "loss": 0.0003,
      "step": 212
    },
    {
      "epoch": 1.6807387862796834,
      "grad_norm": 0.38587337732315063,
      "learning_rate": 4.642781210501858e-06,
      "loss": 0.0052,
      "step": 213
    },
    {
      "epoch": 1.6886543535620053,
      "grad_norm": 0.4533829987049103,
      "learning_rate": 4.6365892596982295e-06,
      "loss": 0.0036,
      "step": 214
    },
    {
      "epoch": 1.6965699208443272,
      "grad_norm": 0.9035217761993408,
      "learning_rate": 4.6303483070670574e-06,
      "loss": 0.0022,
      "step": 215
    },
    {
      "epoch": 1.7044854881266491,
      "grad_norm": 0.545513391494751,
      "learning_rate": 4.624058495742115e-06,
      "loss": 0.0019,
      "step": 216
    },
    {
      "epoch": 1.712401055408971,
      "grad_norm": 0.09330940991640091,
      "learning_rate": 4.617719969977729e-06,
      "loss": 0.0027,
      "step": 217
    },
    {
      "epoch": 1.720316622691293,
      "grad_norm": 0.4759463667869568,
      "learning_rate": 4.611332875145476e-06,
      "loss": 0.002,
      "step": 218
    },
    {
      "epoch": 1.7282321899736148,
      "grad_norm": 0.2812918722629547,
      "learning_rate": 4.604897357730846e-06,
      "loss": 0.0033,
      "step": 219
    },
    {
      "epoch": 1.7361477572559367,
      "grad_norm": 0.5131822228431702,
      "learning_rate": 4.598413565329876e-06,
      "loss": 0.0019,
      "step": 220
    },
    {
      "epoch": 1.7440633245382586,
      "grad_norm": 0.17534379661083221,
      "learning_rate": 4.591881646645775e-06,
      "loss": 0.0014,
      "step": 221
    },
    {
      "epoch": 1.7519788918205803,
      "grad_norm": 0.2808787524700165,
      "learning_rate": 4.585301751485508e-06,
      "loss": 0.0011,
      "step": 222
    },
    {
      "epoch": 1.7598944591029024,
      "grad_norm": 0.1664104461669922,
      "learning_rate": 4.578674030756364e-06,
      "loss": 0.0013,
      "step": 223
    },
    {
      "epoch": 1.767810026385224,
      "grad_norm": 0.11474815756082535,
      "learning_rate": 4.571998636462487e-06,
      "loss": 0.0003,
      "step": 224
    },
    {
      "epoch": 1.7757255936675462,
      "grad_norm": 0.2994535267353058,
      "learning_rate": 4.5652757217014e-06,
      "loss": 0.0043,
      "step": 225
    },
    {
      "epoch": 1.783641160949868,
      "grad_norm": 0.2962826192378998,
      "learning_rate": 4.5585054406604865e-06,
      "loss": 0.0012,
      "step": 226
    },
    {
      "epoch": 1.79155672823219,
      "grad_norm": 0.10045085847377777,
      "learning_rate": 4.551687948613459e-06,
      "loss": 0.0016,
      "step": 227
    },
    {
      "epoch": 1.7994722955145117,
      "grad_norm": 0.8167795538902283,
      "learning_rate": 4.544823401916794e-06,
      "loss": 0.0024,
      "step": 228
    },
    {
      "epoch": 1.8073878627968338,
      "grad_norm": 0.562492847442627,
      "learning_rate": 4.537911958006149e-06,
      "loss": 0.0029,
      "step": 229
    },
    {
      "epoch": 1.8153034300791555,
      "grad_norm": 0.12345612794160843,
      "learning_rate": 4.530953775392749e-06,
      "loss": 0.0006,
      "step": 230
    },
    {
      "epoch": 1.8232189973614776,
      "grad_norm": 0.1261640191078186,
      "learning_rate": 4.523949013659754e-06,
      "loss": 0.0006,
      "step": 231
    },
    {
      "epoch": 1.8311345646437993,
      "grad_norm": 0.2126503586769104,
      "learning_rate": 4.5168978334585955e-06,
      "loss": 0.0005,
      "step": 232
    },
    {
      "epoch": 1.8390501319261214,
      "grad_norm": 0.23717330396175385,
      "learning_rate": 4.509800396505298e-06,
      "loss": 0.0019,
      "step": 233
    },
    {
      "epoch": 1.8469656992084431,
      "grad_norm": 0.2814564108848572,
      "learning_rate": 4.502656865576762e-06,
      "loss": 0.0019,
      "step": 234
    },
    {
      "epoch": 1.8548812664907652,
      "grad_norm": 0.12869513034820557,
      "learning_rate": 4.495467404507039e-06,
      "loss": 0.0007,
      "step": 235
    },
    {
      "epoch": 1.862796833773087,
      "grad_norm": 0.45971599221229553,
      "learning_rate": 4.4882321781835666e-06,
      "loss": 0.0007,
      "step": 236
    },
    {
      "epoch": 1.870712401055409,
      "grad_norm": 0.15887171030044556,
      "learning_rate": 4.4809513525433925e-06,
      "loss": 0.0009,
      "step": 237
    },
    {
      "epoch": 1.8786279683377307,
      "grad_norm": 0.26040664315223694,
      "learning_rate": 4.473625094569366e-06,
      "loss": 0.0005,
      "step": 238
    },
    {
      "epoch": 1.8865435356200528,
      "grad_norm": 0.6354907155036926,
      "learning_rate": 4.466253572286308e-06,
      "loss": 0.0012,
      "step": 239
    },
    {
      "epoch": 1.8944591029023745,
      "grad_norm": 0.5247849225997925,
      "learning_rate": 4.458836954757161e-06,
      "loss": 0.0018,
      "step": 240
    },
    {
      "epoch": 1.9023746701846966,
      "grad_norm": 0.14983093738555908,
      "learning_rate": 4.4513754120791065e-06,
      "loss": 0.0004,
      "step": 241
    },
    {
      "epoch": 1.9102902374670183,
      "grad_norm": 0.1560702919960022,
      "learning_rate": 4.443869115379667e-06,
      "loss": 0.0011,
      "step": 242
    },
    {
      "epoch": 1.9182058047493404,
      "grad_norm": 0.5521268248558044,
      "learning_rate": 4.436318236812782e-06,
      "loss": 0.0002,
      "step": 243
    },
    {
      "epoch": 1.9261213720316621,
      "grad_norm": 0.34709402918815613,
      "learning_rate": 4.428722949554858e-06,
      "loss": 0.0009,
      "step": 244
    },
    {
      "epoch": 1.9340369393139842,
      "grad_norm": 0.4721614122390747,
      "learning_rate": 4.421083427800795e-06,
      "loss": 0.0014,
      "step": 245
    },
    {
      "epoch": 1.941952506596306,
      "grad_norm": 0.12338114529848099,
      "learning_rate": 4.413399846759998e-06,
      "loss": 0.0005,
      "step": 246
    },
    {
      "epoch": 1.949868073878628,
      "grad_norm": 0.2648129463195801,
      "learning_rate": 4.405672382652349e-06,
      "loss": 0.0035,
      "step": 247
    },
    {
      "epoch": 1.9577836411609497,
      "grad_norm": 0.41379258036613464,
      "learning_rate": 4.397901212704176e-06,
      "loss": 0.0001,
      "step": 248
    },
    {
      "epoch": 1.9656992084432718,
      "grad_norm": 0.04220602661371231,
      "learning_rate": 4.390086515144179e-06,
      "loss": 0.0002,
      "step": 249
    },
    {
      "epoch": 1.9736147757255935,
      "grad_norm": 0.04110397771000862,
      "learning_rate": 4.38222846919935e-06,
      "loss": 0.0001,
      "step": 250
    },
    {
      "epoch": 1.9815303430079156,
      "grad_norm": 0.027100196108222008,
      "learning_rate": 4.3743272550908545e-06,
      "loss": 0.0001,
      "step": 251
    },
    {
      "epoch": 1.9894459102902373,
      "grad_norm": 0.0841871052980423,
      "learning_rate": 4.366383054029907e-06,
      "loss": 0.0002,
      "step": 252
    },
    {
      "epoch": 1.9973614775725594,
      "grad_norm": 0.038113586604595184,
      "learning_rate": 4.358396048213609e-06,
      "loss": 0.0002,
      "step": 253
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.038113586604595184,
      "learning_rate": 4.350366420820771e-06,
      "loss": 0.0001,
      "step": 254
    },
    {
      "epoch": 2.007915567282322,
      "grad_norm": 0.1468001902103424,
      "learning_rate": 4.342294356007715e-06,
      "loss": 0.0003,
      "step": 255
    },
    {
      "epoch": 2.015831134564644,
      "grad_norm": 0.1259719580411911,
      "learning_rate": 4.3341800389040465e-06,
      "loss": 0.0003,
      "step": 256
    },
    {
      "epoch": 2.0237467018469655,
      "grad_norm": 0.042028915137052536,
      "learning_rate": 4.326023655608412e-06,
      "loss": 0.0,
      "step": 257
    },
    {
      "epoch": 2.0316622691292876,
      "grad_norm": 0.007664548233151436,
      "learning_rate": 4.317825393184226e-06,
      "loss": 0.0,
      "step": 258
    },
    {
      "epoch": 2.0395778364116097,
      "grad_norm": 0.007942886091768742,
      "learning_rate": 4.30958543965539e-06,
      "loss": 0.0,
      "step": 259
    },
    {
      "epoch": 2.0474934036939314,
      "grad_norm": 0.010280617512762547,
      "learning_rate": 4.3013039840019675e-06,
      "loss": 0.0,
      "step": 260
    },
    {
      "epoch": 2.055408970976253,
      "grad_norm": 0.007112486753612757,
      "learning_rate": 4.292981216155864e-06,
      "loss": 0.0,
      "step": 261
    },
    {
      "epoch": 2.063324538258575,
      "grad_norm": 0.013413694687187672,
      "learning_rate": 4.284617326996458e-06,
      "loss": 0.0,
      "step": 262
    },
    {
      "epoch": 2.0712401055408973,
      "grad_norm": 0.0064396364614367485,
      "learning_rate": 4.276212508346232e-06,
      "loss": 0.0,
      "step": 263
    },
    {
      "epoch": 2.079155672823219,
      "grad_norm": 0.008679047226905823,
      "learning_rate": 4.267766952966369e-06,
      "loss": 0.0002,
      "step": 264
    },
    {
      "epoch": 2.0870712401055407,
      "grad_norm": 0.06370187550783157,
      "learning_rate": 4.2592808545523335e-06,
      "loss": 0.0007,
      "step": 265
    },
    {
      "epoch": 2.094986807387863,
      "grad_norm": 0.3114399313926697,
      "learning_rate": 4.250754407729428e-06,
      "loss": 0.0009,
      "step": 266
    },
    {
      "epoch": 2.1029023746701845,
      "grad_norm": 0.38784605264663696,
      "learning_rate": 4.242187808048329e-06,
      "loss": 0.0,
      "step": 267
    },
    {
      "epoch": 2.1108179419525066,
      "grad_norm": 0.013528178445994854,
      "learning_rate": 4.233581251980604e-06,
      "loss": 0.0009,
      "step": 268
    },
    {
      "epoch": 2.1187335092348283,
      "grad_norm": 0.7630422711372375,
      "learning_rate": 4.224934936914206e-06,
      "loss": 0.0002,
      "step": 269
    },
    {
      "epoch": 2.1266490765171504,
      "grad_norm": 0.06802177429199219,
      "learning_rate": 4.216249061148939e-06,
      "loss": 0.0,
      "step": 270
    },
    {
      "epoch": 2.1345646437994725,
      "grad_norm": 0.015802081674337387,
      "learning_rate": 4.207523823891924e-06,
      "loss": 0.0008,
      "step": 271
    },
    {
      "epoch": 2.142480211081794,
      "grad_norm": 0.6553952693939209,
      "learning_rate": 4.198759425253015e-06,
      "loss": 0.0,
      "step": 272
    },
    {
      "epoch": 2.150395778364116,
      "grad_norm": 0.007900790311396122,
      "learning_rate": 4.1899560662402204e-06,
      "loss": 0.0001,
      "step": 273
    },
    {
      "epoch": 2.158311345646438,
      "grad_norm": 0.058594342321157455,
      "learning_rate": 4.18111394875509e-06,
      "loss": 0.0,
      "step": 274
    },
    {
      "epoch": 2.16622691292876,
      "grad_norm": 0.008023036643862724,
      "learning_rate": 4.172233275588082e-06,
      "loss": 0.0001,
      "step": 275
    },
    {
      "epoch": 2.174142480211082,
      "grad_norm": 0.06547389924526215,
      "learning_rate": 4.163314250413913e-06,
      "loss": 0.0,
      "step": 276
    },
    {
      "epoch": 2.1820580474934035,
      "grad_norm": 0.007858374156057835,
      "learning_rate": 4.154357077786892e-06,
      "loss": 0.0006,
      "step": 277
    },
    {
      "epoch": 2.1899736147757256,
      "grad_norm": 0.2166021764278412,
      "learning_rate": 4.145361963136219e-06,
      "loss": 0.0,
      "step": 278
    },
    {
      "epoch": 2.1978891820580473,
      "grad_norm": 0.36737874150276184,
      "learning_rate": 4.136329112761285e-06,
      "loss": 0.0004,
      "step": 279
    },
    {
      "epoch": 2.2058047493403694,
      "grad_norm": 0.07245466858148575,
      "learning_rate": 4.127258733826929e-06,
      "loss": 0.0,
      "step": 280
    },
    {
      "epoch": 2.213720316622691,
      "grad_norm": 0.029037440195679665,
      "learning_rate": 4.118151034358696e-06,
      "loss": 0.0,
      "step": 281
    },
    {
      "epoch": 2.221635883905013,
      "grad_norm": 0.06319958716630936,
      "learning_rate": 4.109006223238064e-06,
      "loss": 0.0002,
      "step": 282
    },
    {
      "epoch": 2.229551451187335,
      "grad_norm": 0.04793912172317505,
      "learning_rate": 4.0998245101976495e-06,
      "loss": 0.0,
      "step": 283
    },
    {
      "epoch": 2.237467018469657,
      "grad_norm": 0.006495122332125902,
      "learning_rate": 4.0906061058164e-06,
      "loss": 0.0012,
      "step": 284
    },
    {
      "epoch": 2.2453825857519787,
      "grad_norm": 0.14944440126419067,
      "learning_rate": 4.081351221514765e-06,
      "loss": 0.0001,
      "step": 285
    },
    {
      "epoch": 2.253298153034301,
      "grad_norm": 0.05634067952632904,
      "learning_rate": 4.072060069549848e-06,
      "loss": 0.0,
      "step": 286
    },
    {
      "epoch": 2.261213720316623,
      "grad_norm": 0.003317984053865075,
      "learning_rate": 4.062732863010534e-06,
      "loss": 0.0008,
      "step": 287
    },
    {
      "epoch": 2.2691292875989446,
      "grad_norm": 0.10713385790586472,
      "learning_rate": 4.053369815812608e-06,
      "loss": 0.0001,
      "step": 288
    },
    {
      "epoch": 2.2770448548812663,
      "grad_norm": 0.026916418224573135,
      "learning_rate": 4.043971142693845e-06,
      "loss": 0.0,
      "step": 289
    },
    {
      "epoch": 2.2849604221635884,
      "grad_norm": 0.16971777379512787,
      "learning_rate": 4.034537059209085e-06,
      "loss": 0.0012,
      "step": 290
    },
    {
      "epoch": 2.2928759894459105,
      "grad_norm": 0.2465554028749466,
      "learning_rate": 4.025067781725294e-06,
      "loss": 0.0009,
      "step": 291
    },
    {
      "epoch": 2.300791556728232,
      "grad_norm": 0.017931131646037102,
      "learning_rate": 4.015563527416596e-06,
      "loss": 0.0019,
      "step": 292
    },
    {
      "epoch": 2.308707124010554,
      "grad_norm": 0.5008033514022827,
      "learning_rate": 4.006024514259295e-06,
      "loss": 0.0016,
      "step": 293
    },
    {
      "epoch": 2.316622691292876,
      "grad_norm": 0.2697279155254364,
      "learning_rate": 3.996450961026876e-06,
      "loss": 0.0003,
      "step": 294
    },
    {
      "epoch": 2.324538258575198,
      "grad_norm": 0.04123712331056595,
      "learning_rate": 3.986843087284986e-06,
      "loss": 0.0,
      "step": 295
    },
    {
      "epoch": 2.33245382585752,
      "grad_norm": 0.018080947920680046,
      "learning_rate": 3.977201113386402e-06,
      "loss": 0.0001,
      "step": 296
    },
    {
      "epoch": 2.3403693931398415,
      "grad_norm": 0.1296033263206482,
      "learning_rate": 3.96752526046597e-06,
      "loss": 0.0022,
      "step": 297
    },
    {
      "epoch": 2.3482849604221636,
      "grad_norm": 0.053319524973630905,
      "learning_rate": 3.957815750435542e-06,
      "loss": 0.0008,
      "step": 298
    },
    {
      "epoch": 2.3562005277044857,
      "grad_norm": 0.48205140233039856,
      "learning_rate": 3.948072805978879e-06,
      "loss": 0.0001,
      "step": 299
    },
    {
      "epoch": 2.3641160949868074,
      "grad_norm": 0.031876154243946075,
      "learning_rate": 3.938296650546552e-06,
      "loss": 0.0001,
      "step": 300
    },
    {
      "epoch": 2.372031662269129,
      "grad_norm": 0.03231018781661987,
      "learning_rate": 3.928487508350808e-06,
      "loss": 0.0002,
      "step": 301
    },
    {
      "epoch": 2.379947229551451,
      "grad_norm": 0.04064059630036354,
      "learning_rate": 3.918645604360434e-06,
      "loss": 0.0001,
      "step": 302
    },
    {
      "epoch": 2.387862796833773,
      "grad_norm": 0.04606090486049652,
      "learning_rate": 3.908771164295595e-06,
      "loss": 0.0002,
      "step": 303
    },
    {
      "epoch": 2.395778364116095,
      "grad_norm": 0.031620122492313385,
      "learning_rate": 3.898864414622661e-06,
      "loss": 0.0001,
      "step": 304
    },
    {
      "epoch": 2.4036939313984167,
      "grad_norm": 0.03372185304760933,
      "learning_rate": 3.888925582549006e-06,
      "loss": 0.0001,
      "step": 305
    },
    {
      "epoch": 2.411609498680739,
      "grad_norm": 0.025314636528491974,
      "learning_rate": 3.878954896017804e-06,
      "loss": 0.0003,
      "step": 306
    },
    {
      "epoch": 2.4195250659630605,
      "grad_norm": 0.07136713713407516,
      "learning_rate": 3.868952583702798e-06,
      "loss": 0.0017,
      "step": 307
    },
    {
      "epoch": 2.4274406332453826,
      "grad_norm": 0.1380709558725357,
      "learning_rate": 3.858918875003053e-06,
      "loss": 0.0001,
      "step": 308
    },
    {
      "epoch": 2.4353562005277043,
      "grad_norm": 0.020328925922513008,
      "learning_rate": 3.848854000037702e-06,
      "loss": 0.0001,
      "step": 309
    },
    {
      "epoch": 2.4432717678100264,
      "grad_norm": 0.08837846666574478,
      "learning_rate": 3.83875818964066e-06,
      "loss": 0.0001,
      "step": 310
    },
    {
      "epoch": 2.451187335092348,
      "grad_norm": 0.03620443120598793,
      "learning_rate": 3.828631675355338e-06,
      "loss": 0.0001,
      "step": 311
    },
    {
      "epoch": 2.45910290237467,
      "grad_norm": 0.01960093528032303,
      "learning_rate": 3.818474689429324e-06,
      "loss": 0.0001,
      "step": 312
    },
    {
      "epoch": 2.467018469656992,
      "grad_norm": 0.1628216803073883,
      "learning_rate": 3.808287464809063e-06,
      "loss": 0.0025,
      "step": 313
    },
    {
      "epoch": 2.474934036939314,
      "grad_norm": 0.19068323075771332,
      "learning_rate": 3.7980702351345146e-06,
      "loss": 0.0002,
      "step": 314
    },
    {
      "epoch": 2.4828496042216357,
      "grad_norm": 0.06465128064155579,
      "learning_rate": 3.787823234733788e-06,
      "loss": 0.0001,
      "step": 315
    },
    {
      "epoch": 2.490765171503958,
      "grad_norm": 0.016947569325566292,
      "learning_rate": 3.7775466986177763e-06,
      "loss": 0.0001,
      "step": 316
    },
    {
      "epoch": 2.4986807387862795,
      "grad_norm": 0.010782090947031975,
      "learning_rate": 3.7672408624747598e-06,
      "loss": 0.0,
      "step": 317
    },
    {
      "epoch": 2.5065963060686016,
      "grad_norm": 0.02703935280442238,
      "learning_rate": 3.756905962665005e-06,
      "loss": 0.0001,
      "step": 318
    },
    {
      "epoch": 2.5145118733509237,
      "grad_norm": 0.010577079840004444,
      "learning_rate": 3.7465422362153416e-06,
      "loss": 0.0,
      "step": 319
    },
    {
      "epoch": 2.5224274406332454,
      "grad_norm": 0.006201338488608599,
      "learning_rate": 3.736149920813726e-06,
      "loss": 0.0,
      "step": 320
    },
    {
      "epoch": 2.530343007915567,
      "grad_norm": 0.12876874208450317,
      "learning_rate": 3.7257292548037917e-06,
      "loss": 0.0009,
      "step": 321
    },
    {
      "epoch": 2.538258575197889,
      "grad_norm": 0.012635966762900352,
      "learning_rate": 3.715280477179382e-06,
      "loss": 0.0,
      "step": 322
    },
    {
      "epoch": 2.5461741424802113,
      "grad_norm": 0.0092966603115201,
      "learning_rate": 3.7048038275790695e-06,
      "loss": 0.0,
      "step": 323
    },
    {
      "epoch": 2.554089709762533,
      "grad_norm": 0.013077512383460999,
      "learning_rate": 3.6942995462806574e-06,
      "loss": 0.0,
      "step": 324
    },
    {
      "epoch": 2.5620052770448547,
      "grad_norm": 0.01774498261511326,
      "learning_rate": 3.6837678741956747e-06,
      "loss": 0.0001,
      "step": 325
    },
    {
      "epoch": 2.569920844327177,
      "grad_norm": 0.021662306040525436,
      "learning_rate": 3.6732090528638432e-06,
      "loss": 0.0,
      "step": 326
    },
    {
      "epoch": 2.577836411609499,
      "grad_norm": 0.005042460281401873,
      "learning_rate": 3.6626233244475445e-06,
      "loss": 0.0001,
      "step": 327
    },
    {
      "epoch": 2.5857519788918206,
      "grad_norm": 0.07442116737365723,
      "learning_rate": 3.6520109317262624e-06,
      "loss": 0.0,
      "step": 328
    },
    {
      "epoch": 2.5936675461741423,
      "grad_norm": 0.005822429433465004,
      "learning_rate": 3.6413721180910165e-06,
      "loss": 0.0,
      "step": 329
    },
    {
      "epoch": 2.6015831134564644,
      "grad_norm": 0.0028016779106110334,
      "learning_rate": 3.6307071275387807e-06,
      "loss": 0.0,
      "step": 330
    },
    {
      "epoch": 2.6094986807387865,
      "grad_norm": 0.005941660143435001,
      "learning_rate": 3.6200162046668826e-06,
      "loss": 0.0,
      "step": 331
    },
    {
      "epoch": 2.6174142480211082,
      "grad_norm": 0.007512587122619152,
      "learning_rate": 3.6092995946673996e-06,
      "loss": 0.0013,
      "step": 332
    },
    {
      "epoch": 2.62532981530343,
      "grad_norm": 0.1878112256526947,
      "learning_rate": 3.5985575433215345e-06,
      "loss": 0.0,
      "step": 333
    },
    {
      "epoch": 2.633245382585752,
      "grad_norm": 0.24462495744228363,
      "learning_rate": 3.587790296993976e-06,
      "loss": 0.0024,
      "step": 334
    },
    {
      "epoch": 2.641160949868074,
      "grad_norm": 0.010779382660984993,
      "learning_rate": 3.5769981026272477e-06,
      "loss": 0.0001,
      "step": 335
    },
    {
      "epoch": 2.649076517150396,
      "grad_norm": 0.024886395782232285,
      "learning_rate": 3.5661812077360496e-06,
      "loss": 0.0,
      "step": 336
    },
    {
      "epoch": 2.6569920844327175,
      "grad_norm": 0.27707141637802124,
      "learning_rate": 3.5553398604015777e-06,
      "loss": 0.0014,
      "step": 337
    },
    {
      "epoch": 2.6649076517150396,
      "grad_norm": 0.006285437382757664,
      "learning_rate": 3.544474309265834e-06,
      "loss": 0.0,
      "step": 338
    },
    {
      "epoch": 2.6728232189973617,
      "grad_norm": 0.006901748012751341,
      "learning_rate": 3.5335848035259257e-06,
      "loss": 0.0,
      "step": 339
    },
    {
      "epoch": 2.6807387862796834,
      "grad_norm": 0.005699906963855028,
      "learning_rate": 3.5226715929283507e-06,
      "loss": 0.0,
      "step": 340
    },
    {
      "epoch": 2.688654353562005,
      "grad_norm": 0.010477579198777676,
      "learning_rate": 3.511734927763265e-06,
      "loss": 0.0,
      "step": 341
    },
    {
      "epoch": 2.6965699208443272,
      "grad_norm": 0.0179861169308424,
      "learning_rate": 3.5007750588587495e-06,
      "loss": 0.0007,
      "step": 342
    },
    {
      "epoch": 2.7044854881266494,
      "grad_norm": 0.1684534102678299,
      "learning_rate": 3.4897922375750517e-06,
      "loss": 0.0001,
      "step": 343
    },
    {
      "epoch": 2.712401055408971,
      "grad_norm": 0.05259430781006813,
      "learning_rate": 3.478786715798823e-06,
| "loss": 0.0001, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 2.7203166226912927, | |
| "grad_norm": 0.6708176732063293, | |
| "learning_rate": 3.4677587459373417e-06, | |
| "loss": 0.0003, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 2.728232189973615, | |
| "grad_norm": 0.02362961135804653, | |
| "learning_rate": 3.4567085809127247e-06, | |
| "loss": 0.0, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 2.736147757255937, | |
| "grad_norm": 0.008437946438789368, | |
| "learning_rate": 3.4456364741561256e-06, | |
| "loss": 0.0, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 2.7440633245382586, | |
| "grad_norm": 0.011274375952780247, | |
| "learning_rate": 3.434542679601922e-06, | |
| "loss": 0.0023, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 2.7519788918205803, | |
| "grad_norm": 0.2040291279554367, | |
| "learning_rate": 3.423427451681895e-06, | |
| "loss": 0.0001, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 2.7598944591029024, | |
| "grad_norm": 0.01595059037208557, | |
| "learning_rate": 3.4122910453193885e-06, | |
| "loss": 0.0001, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 2.767810026385224, | |
| "grad_norm": 0.02352329157292843, | |
| "learning_rate": 3.4011337159234674e-06, | |
| "loss": 0.0012, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 2.7757255936675462, | |
| "grad_norm": 0.10779980570077896, | |
| "learning_rate": 3.3899557193830585e-06, | |
| "loss": 0.0003, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 2.783641160949868, | |
| "grad_norm": 0.08968038111925125, | |
| "learning_rate": 3.3787573120610794e-06, | |
| "loss": 0.0001, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 2.79155672823219, | |
| "grad_norm": 0.009699780493974686, | |
| "learning_rate": 3.367538750788563e-06, | |
| "loss": 0.0, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 2.7994722955145117, | |
| "grad_norm": 0.01223874930292368, | |
| "learning_rate": 3.356300292858763e-06, | |
| "loss": 0.0001, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 2.807387862796834, | |
| "grad_norm": 0.015586338005959988, | |
| "learning_rate": 3.345042196021257e-06, | |
| "loss": 0.0001, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 2.8153034300791555, | |
| "grad_norm": 0.10576637834310532, | |
| "learning_rate": 3.333764718476032e-06, | |
| "loss": 0.0011, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 2.8232189973614776, | |
| "grad_norm": 0.010167757980525494, | |
| "learning_rate": 3.3224681188675643e-06, | |
| "loss": 0.0, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 2.8311345646437993, | |
| "grad_norm": 0.09357836842536926, | |
| "learning_rate": 3.3111526562788864e-06, | |
| "loss": 0.0003, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 2.8390501319261214, | |
| "grad_norm": 0.013279908336699009, | |
| "learning_rate": 3.2998185902256475e-06, | |
| "loss": 0.0001, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 2.846965699208443, | |
| "grad_norm": 0.030286984518170357, | |
| "learning_rate": 3.2884661806501576e-06, | |
| "loss": 0.0001, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 2.8548812664907652, | |
| "grad_norm": 0.01646578684449196, | |
| "learning_rate": 3.2770956879154305e-06, | |
| "loss": 0.0001, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 2.862796833773087, | |
| "grad_norm": 0.018039187416434288, | |
| "learning_rate": 3.2657073727992078e-06, | |
| "loss": 0.0, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 2.870712401055409, | |
| "grad_norm": 0.009995294734835625, | |
| "learning_rate": 3.2543014964879814e-06, | |
| "loss": 0.0, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 2.8786279683377307, | |
| "grad_norm": 0.01269359327852726, | |
| "learning_rate": 3.2428783205710023e-06, | |
| "loss": 0.0001, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 2.886543535620053, | |
| "grad_norm": 0.01195498462766409, | |
| "learning_rate": 3.2314381070342815e-06, | |
| "loss": 0.0, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 2.8944591029023745, | |
| "grad_norm": 0.009492097422480583, | |
| "learning_rate": 3.21998111825458e-06, | |
| "loss": 0.0001, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 2.9023746701846966, | |
| "grad_norm": 0.012942278757691383, | |
| "learning_rate": 3.208507616993393e-06, | |
| "loss": 0.0001, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 2.9102902374670183, | |
| "grad_norm": 0.014576618559658527, | |
| "learning_rate": 3.1970178663909233e-06, | |
| "loss": 0.0, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 2.9182058047493404, | |
| "grad_norm": 0.014812547713518143, | |
| "learning_rate": 3.1855121299600454e-06, | |
| "loss": 0.0, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 2.926121372031662, | |
| "grad_norm": 0.005972759798169136, | |
| "learning_rate": 3.173990671580263e-06, | |
| "loss": 0.0008, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 2.9340369393139842, | |
| "grad_norm": 0.16172465682029724, | |
| "learning_rate": 3.162453755491655e-06, | |
| "loss": 0.0, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 2.941952506596306, | |
| "grad_norm": 0.005183232482522726, | |
| "learning_rate": 3.1509016462888175e-06, | |
| "loss": 0.0, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 2.949868073878628, | |
| "grad_norm": 0.013237228617072105, | |
| "learning_rate": 3.139334608914795e-06, | |
| "loss": 0.0001, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 2.9577836411609497, | |
| "grad_norm": 0.003913429565727711, | |
| "learning_rate": 3.1277529086550044e-06, | |
| "loss": 0.0, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 2.965699208443272, | |
| "grad_norm": 0.123182512819767, | |
| "learning_rate": 3.1161568111311487e-06, | |
| "loss": 0.0002, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 2.9736147757255935, | |
| "grad_norm": 0.007754236459732056, | |
| "learning_rate": 3.1045465822951265e-06, | |
| "loss": 0.0, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 2.9815303430079156, | |
| "grad_norm": 0.003976548556238413, | |
| "learning_rate": 3.092922488422933e-06, | |
| "loss": 0.0, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 2.9894459102902373, | |
| "grad_norm": 0.0028897474985569715, | |
| "learning_rate": 3.0812847961085527e-06, | |
| "loss": 0.0, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 2.9973614775725594, | |
| "grad_norm": 0.0054747299291193485, | |
| "learning_rate": 3.0696337722578444e-06, | |
| "loss": 0.0, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "grad_norm": 0.005969229619950056, | |
| "learning_rate": 3.057969684082421e-06, | |
| "loss": 0.0, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 3.007915567282322, | |
| "grad_norm": 0.004056141711771488, | |
| "learning_rate": 3.04629279909352e-06, | |
| "loss": 0.0, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 3.015831134564644, | |
| "grad_norm": 0.006289948709309101, | |
| "learning_rate": 3.0346033850958685e-06, | |
| "loss": 0.0, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 3.0237467018469655, | |
| "grad_norm": 0.06978850066661835, | |
| "learning_rate": 3.0229017101815424e-06, | |
| "loss": 0.0001, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 3.0316622691292876, | |
| "grad_norm": 0.003850232344120741, | |
| "learning_rate": 3.011188042723816e-06, | |
| "loss": 0.0, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 3.0395778364116097, | |
| "grad_norm": 0.006014230661094189, | |
| "learning_rate": 2.9994626513710085e-06, | |
| "loss": 0.0, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 3.0474934036939314, | |
| "grad_norm": 0.008263601921498775, | |
| "learning_rate": 2.9877258050403214e-06, | |
| "loss": 0.0, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 3.055408970976253, | |
| "grad_norm": 0.0018157489830628037, | |
| "learning_rate": 2.975977772911671e-06, | |
| "loss": 0.0, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 3.063324538258575, | |
| "grad_norm": 0.0012247132835909724, | |
| "learning_rate": 2.964218824421518e-06, | |
| "loss": 0.0, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 3.0712401055408973, | |
| "grad_norm": 0.005896579474210739, | |
| "learning_rate": 2.9524492292566824e-06, | |
| "loss": 0.0, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 3.079155672823219, | |
| "grad_norm": 0.0017256075516343117, | |
| "learning_rate": 2.9406692573481634e-06, | |
| "loss": 0.0, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 3.0870712401055407, | |
| "grad_norm": 0.001976009923964739, | |
| "learning_rate": 2.928879178864946e-06, | |
| "loss": 0.0, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 3.094986807387863, | |
| "grad_norm": 0.31570467352867126, | |
| "learning_rate": 2.9170792642078057e-06, | |
| "loss": 0.0026, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 3.1029023746701845, | |
| "grad_norm": 0.004713984671980143, | |
| "learning_rate": 2.9052697840031065e-06, | |
| "loss": 0.0, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 3.1108179419525066, | |
| "grad_norm": 0.135233074426651, | |
| "learning_rate": 2.8934510090965943e-06, | |
| "loss": 0.0005, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 3.1187335092348283, | |
| "grad_norm": 0.0030627846717834473, | |
| "learning_rate": 2.8816232105471864e-06, | |
| "loss": 0.0, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 3.1266490765171504, | |
| "grad_norm": 0.005015626084059477, | |
| "learning_rate": 2.8697866596207524e-06, | |
| "loss": 0.0, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 3.1345646437994725, | |
| "grad_norm": 0.0035850070416927338, | |
| "learning_rate": 2.8579416277838952e-06, | |
| "loss": 0.0, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 3.142480211081794, | |
| "grad_norm": 0.007022330071777105, | |
| "learning_rate": 2.846088386697723e-06, | |
| "loss": 0.0, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 3.150395778364116, | |
| "grad_norm": 0.003523892490193248, | |
| "learning_rate": 2.8342272082116214e-06, | |
| "loss": 0.0, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 3.158311345646438, | |
| "grad_norm": 0.04955633729696274, | |
| "learning_rate": 2.822358364357015e-06, | |
| "loss": 0.0001, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 3.16622691292876, | |
| "grad_norm": 0.006145668216049671, | |
| "learning_rate": 2.8104821273411333e-06, | |
| "loss": 0.0, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 3.174142480211082, | |
| "grad_norm": 0.00394571665674448, | |
| "learning_rate": 2.7985987695407618e-06, | |
| "loss": 0.0, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 3.1820580474934035, | |
| "grad_norm": 0.003726326860487461, | |
| "learning_rate": 2.786708563496002e-06, | |
| "loss": 0.0, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 3.1899736147757256, | |
| "grad_norm": 0.003756965510547161, | |
| "learning_rate": 2.774811781904013e-06, | |
| "loss": 0.0, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 3.1978891820580473, | |
| "grad_norm": 0.008439401164650917, | |
| "learning_rate": 2.762908697612765e-06, | |
| "loss": 0.0, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 3.2058047493403694, | |
| "grad_norm": 0.0023035435006022453, | |
| "learning_rate": 2.750999583614777e-06, | |
| "loss": 0.0, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 3.213720316622691, | |
| "grad_norm": 0.003911241423338652, | |
| "learning_rate": 2.739084713040856e-06, | |
| "loss": 0.0, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 3.221635883905013, | |
| "grad_norm": 0.030907543376088142, | |
| "learning_rate": 2.7271643591538355e-06, | |
| "loss": 0.0001, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 3.229551451187335, | |
| "grad_norm": 0.004778213333338499, | |
| "learning_rate": 2.7152387953423047e-06, | |
| "loss": 0.0, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 3.237467018469657, | |
| "grad_norm": 0.007156203966587782, | |
| "learning_rate": 2.703308295114342e-06, | |
| "loss": 0.0, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 3.2453825857519787, | |
| "grad_norm": 0.003489116206765175, | |
| "learning_rate": 2.69137313209124e-06, | |
| "loss": 0.0, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 3.253298153034301, | |
| "grad_norm": 0.004960685037076473, | |
| "learning_rate": 2.6794335800012294e-06, | |
| "loss": 0.0, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 3.261213720316623, | |
| "grad_norm": 0.004319795873016119, | |
| "learning_rate": 2.6674899126732045e-06, | |
| "loss": 0.0, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 3.2691292875989446, | |
| "grad_norm": 0.007416910026222467, | |
| "learning_rate": 2.65554240403044e-06, | |
| "loss": 0.0, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 3.2770448548812663, | |
| "grad_norm": 0.00757510494440794, | |
| "learning_rate": 2.643591328084309e-06, | |
| "loss": 0.0, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 3.2849604221635884, | |
| "grad_norm": 0.006031471770256758, | |
| "learning_rate": 2.631636958928e-06, | |
| "loss": 0.0, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 3.2928759894459105, | |
| "grad_norm": 0.025160983204841614, | |
| "learning_rate": 2.6196795707302304e-06, | |
| "loss": 0.0001, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 3.300791556728232, | |
| "grad_norm": 0.002678148215636611, | |
| "learning_rate": 2.607719437728957e-06, | |
| "loss": 0.0, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 3.308707124010554, | |
| "grad_norm": 0.0023166383616626263, | |
| "learning_rate": 2.595756834225089e-06, | |
| "loss": 0.0, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 3.316622691292876, | |
| "grad_norm": 0.004792348947376013, | |
| "learning_rate": 2.583792034576194e-06, | |
| "loss": 0.0, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 3.324538258575198, | |
| "grad_norm": 0.002316155703738332, | |
| "learning_rate": 2.5718253131902084e-06, | |
| "loss": 0.0, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 3.33245382585752, | |
| "grad_norm": 0.002854425460100174, | |
| "learning_rate": 2.5598569445191418e-06, | |
| "loss": 0.0, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 3.3403693931398415, | |
| "grad_norm": 0.0025122410152107477, | |
| "learning_rate": 2.547887203052786e-06, | |
| "loss": 0.0, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 3.3482849604221636, | |
| "grad_norm": 0.007665908429771662, | |
| "learning_rate": 2.535916363312414e-06, | |
| "loss": 0.0, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 3.3562005277044857, | |
| "grad_norm": 0.0029636449180543423, | |
| "learning_rate": 2.52394469984449e-06, | |
| "loss": 0.0, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 3.3641160949868074, | |
| "grad_norm": 0.003854286391288042, | |
| "learning_rate": 2.5119724872143693e-06, | |
| "loss": 0.0, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 3.372031662269129, | |
| "grad_norm": 0.01237494871020317, | |
| "learning_rate": 2.5e-06, | |
| "loss": 0.0, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 3.379947229551451, | |
| "grad_norm": 0.005211927928030491, | |
| "learning_rate": 2.4880275127856324e-06, | |
| "loss": 0.0, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 3.387862796833773, | |
| "grad_norm": 0.008481196127831936, | |
| "learning_rate": 2.4760553001555105e-06, | |
| "loss": 0.0, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 3.395778364116095, | |
| "grad_norm": 0.0037402070593088865, | |
| "learning_rate": 2.4640836366875872e-06, | |
| "loss": 0.0, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 3.4036939313984167, | |
| "grad_norm": 0.002274413825944066, | |
| "learning_rate": 2.452112796947215e-06, | |
| "loss": 0.0, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 3.411609498680739, | |
| "grad_norm": 0.003822160651907325, | |
| "learning_rate": 2.440143055480859e-06, | |
| "loss": 0.0, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 3.4195250659630605, | |
| "grad_norm": 0.0016057207249104977, | |
| "learning_rate": 2.428174686809793e-06, | |
| "loss": 0.0, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 3.4274406332453826, | |
| "grad_norm": 0.0016935165040194988, | |
| "learning_rate": 2.4162079654238073e-06, | |
| "loss": 0.0, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 3.4353562005277043, | |
| "grad_norm": 0.0018719176296144724, | |
| "learning_rate": 2.404243165774912e-06, | |
| "loss": 0.0, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 3.4432717678100264, | |
| "grad_norm": 0.0013446552911773324, | |
| "learning_rate": 2.392280562271044e-06, | |
| "loss": 0.0, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 3.451187335092348, | |
| "grad_norm": 0.0062268865294754505, | |
| "learning_rate": 2.3803204292697705e-06, | |
| "loss": 0.0, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 3.45910290237467, | |
| "grad_norm": 0.0036832697223871946, | |
| "learning_rate": 2.3683630410720013e-06, | |
| "loss": 0.0, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 3.467018469656992, | |
| "grad_norm": 0.0031680618412792683, | |
| "learning_rate": 2.356408671915692e-06, | |
| "loss": 0.0, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 3.474934036939314, | |
| "grad_norm": 0.00204239459708333, | |
| "learning_rate": 2.3444575959695615e-06, | |
| "loss": 0.0, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 3.4828496042216357, | |
| "grad_norm": 0.002014332450926304, | |
| "learning_rate": 2.3325100873267963e-06, | |
| "loss": 0.0, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 3.490765171503958, | |
| "grad_norm": 0.004848967771977186, | |
| "learning_rate": 2.320566419998772e-06, | |
| "loss": 0.0, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 3.4986807387862795, | |
| "grad_norm": 0.0017829296411946416, | |
| "learning_rate": 2.308626867908761e-06, | |
| "loss": 0.0, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 3.5065963060686016, | |
| "grad_norm": 0.046102311462163925, | |
| "learning_rate": 2.296691704885659e-06, | |
| "loss": 0.0001, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 3.5145118733509237, | |
| "grad_norm": 0.0010413214331492782, | |
| "learning_rate": 2.284761204657696e-06, | |
| "loss": 0.0, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 3.5224274406332454, | |
| "grad_norm": 0.0014018567744642496, | |
| "learning_rate": 2.2728356408461653e-06, | |
| "loss": 0.0, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 3.530343007915567, | |
| "grad_norm": 0.001453349948860705, | |
| "learning_rate": 2.2609152869591445e-06, | |
| "loss": 0.0, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 3.538258575197889, | |
| "grad_norm": 0.0017676043789833784, | |
| "learning_rate": 2.249000416385224e-06, | |
| "loss": 0.0, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 3.5461741424802113, | |
| "grad_norm": 0.002675161464139819, | |
| "learning_rate": 2.2370913023872357e-06, | |
| "loss": 0.0, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 3.554089709762533, | |
| "grad_norm": 0.0015697075286880136, | |
| "learning_rate": 2.2251882180959876e-06, | |
| "loss": 0.0, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 3.5620052770448547, | |
| "grad_norm": 0.0011259898310527205, | |
| "learning_rate": 2.2132914365039993e-06, | |
| "loss": 0.0, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 3.569920844327177, | |
| "grad_norm": 0.002902733162045479, | |
| "learning_rate": 2.201401230459239e-06, | |
| "loss": 0.0, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 3.577836411609499, | |
| "grad_norm": 0.0017832021694630384, | |
| "learning_rate": 2.189517872658867e-06, | |
| "loss": 0.0, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 3.5857519788918206, | |
| "grad_norm": 0.0015155959408730268, | |
| "learning_rate": 2.1776416356429857e-06, | |
| "loss": 0.0, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 3.5936675461741423, | |
| "grad_norm": 0.000976615003310144, | |
| "learning_rate": 2.165772791788379e-06, | |
| "loss": 0.0, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 3.6015831134564644, | |
| "grad_norm": 0.020495692268013954, | |
| "learning_rate": 2.1539116133022776e-06, | |
| "loss": 0.0, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 3.6094986807387865, | |
| "grad_norm": 0.0031040646135807037, | |
| "learning_rate": 2.142058372216105e-06, | |
| "loss": 0.0, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 3.6174142480211082, | |
| "grad_norm": 0.003270247019827366, | |
| "learning_rate": 2.130213340379248e-06, | |
| "loss": 0.0, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 3.62532981530343, | |
| "grad_norm": 0.003336927155032754, | |
| "learning_rate": 2.1183767894528135e-06, | |
| "loss": 0.0, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 3.633245382585752, | |
| "grad_norm": 0.0010945361573249102, | |
| "learning_rate": 2.1065489909034065e-06, | |
| "loss": 0.0, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 3.641160949868074, | |
| "grad_norm": 0.0009257032070308924, | |
| "learning_rate": 2.094730215996894e-06, | |
| "loss": 0.0, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 3.649076517150396, | |
| "grad_norm": 0.001292677130550146, | |
| "learning_rate": 2.082920735792195e-06, | |
| "loss": 0.0, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 3.6569920844327175, | |
| "grad_norm": 0.02042875811457634, | |
| "learning_rate": 2.0711208211350543e-06, | |
| "loss": 0.0, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 3.6649076517150396, | |
| "grad_norm": 0.14115594327449799, | |
| "learning_rate": 2.059330742651837e-06, | |
| "loss": 0.0003, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 3.6728232189973617, | |
| "grad_norm": 0.0009795207297429442, | |
| "learning_rate": 2.047550770743318e-06, | |
| "loss": 0.0, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 3.6807387862796834, | |
| "grad_norm": 0.005499033723026514, | |
| "learning_rate": 2.035781175578483e-06, | |
| "loss": 0.0, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 3.688654353562005, | |
| "grad_norm": 0.0013082363875582814, | |
| "learning_rate": 2.024022227088329e-06, | |
| "loss": 0.0, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 3.6965699208443272, | |
| "grad_norm": 0.0016859848983585835, | |
| "learning_rate": 2.01227419495968e-06, | |
| "loss": 0.0, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 3.7044854881266494, | |
| "grad_norm": 0.0014348351396620274, | |
| "learning_rate": 2.0005373486289932e-06, | |
| "loss": 0.0, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 3.712401055408971, | |
| "grad_norm": 0.0014519428368657827, | |
| "learning_rate": 1.988811957276185e-06, | |
| "loss": 0.0, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 3.7203166226912927, | |
| "grad_norm": 0.0020222277380526066, | |
| "learning_rate": 1.977098289818459e-06, | |
| "loss": 0.0, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 3.728232189973615, | |
| "grad_norm": 0.0014894002815708518, | |
| "learning_rate": 1.9653966149041323e-06, | |
| "loss": 0.0, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 3.736147757255937, | |
| "grad_norm": 0.0010474161244928837, | |
| "learning_rate": 1.9537072009064814e-06, | |
| "loss": 0.0, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 3.7440633245382586, | |
| "grad_norm": 0.00139061629306525, | |
| "learning_rate": 1.9420303159175795e-06, | |
| "loss": 0.0, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 3.7519788918205803, | |
| "grad_norm": 0.003557840595021844, | |
| "learning_rate": 1.930366227742157e-06, | |
| "loss": 0.0, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 3.7598944591029024, | |
| "grad_norm": 0.0006267677526921034, | |
| "learning_rate": 1.918715203891448e-06, | |
| "loss": 0.0, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 3.767810026385224, | |
| "grad_norm": 0.009051766246557236, | |
| "learning_rate": 1.907077511577068e-06, | |
| "loss": 0.0, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 3.7757255936675462, | |
| "grad_norm": 0.0014555881498381495, | |
| "learning_rate": 1.8954534177048744e-06, | |
| "loss": 0.0, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 3.783641160949868, | |
| "grad_norm": 0.002180263167247176, | |
| "learning_rate": 1.8838431888688528e-06, | |
| "loss": 0.0, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 3.79155672823219, | |
| "grad_norm": 0.0008298380998894572, | |
| "learning_rate": 1.8722470913449962e-06, | |
| "loss": 0.0, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 3.7994722955145117, | |
| "grad_norm": 0.08523594588041306, | |
| "learning_rate": 1.8606653910852058e-06, | |
| "loss": 0.0002, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 3.807387862796834, | |
| "grad_norm": 0.0013316926779225469, | |
| "learning_rate": 1.8490983537111831e-06, | |
| "loss": 0.0, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 3.8153034300791555, | |
| "grad_norm": 0.0014392542652785778, | |
| "learning_rate": 1.8375462445083464e-06, | |
| "loss": 0.0, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 3.8232189973614776, | |
| "grad_norm": 0.0012960598105564713, | |
| "learning_rate": 1.8260093284197371e-06, | |
| "loss": 0.0, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 3.8311345646437993, | |
| "grad_norm": 0.0018995273858308792, | |
| "learning_rate": 1.814487870039955e-06, | |
| "loss": 0.0, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 3.8390501319261214, | |
| "grad_norm": 0.0011370782740414143, | |
| "learning_rate": 1.8029821336090769e-06, | |
| "loss": 0.0, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 3.846965699208443, | |
| "grad_norm": 0.001328559941612184, | |
| "learning_rate": 1.7914923830066074e-06, | |
| "loss": 0.0, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 3.8548812664907652, | |
| "grad_norm": 0.006753549445420504, | |
| "learning_rate": 1.7800188817454209e-06, | |
| "loss": 0.0, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 3.862796833773087, | |
| "grad_norm": 0.0009229669230990112, | |
| "learning_rate": 1.7685618929657193e-06, | |
| "loss": 0.0, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 3.870712401055409, | |
| "grad_norm": 0.0008038815576583147, | |
| "learning_rate": 1.7571216794289985e-06, | |
| "loss": 0.0, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 3.8786279683377307, | |
| "grad_norm": 0.0008389101712964475, | |
| "learning_rate": 1.7456985035120194e-06, | |
| "loss": 0.0, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 3.886543535620053, | |
| "grad_norm": 0.0009506504866294563, | |
| "learning_rate": 1.734292627200793e-06, | |
| "loss": 0.0, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 3.8944591029023745, | |
| "grad_norm": 0.0009613066795282066, | |
| "learning_rate": 1.7229043120845705e-06, | |
| "loss": 0.0, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 3.9023746701846966, | |
| "grad_norm": 0.050488874316215515, | |
| "learning_rate": 1.711533819349842e-06, | |
| "loss": 0.0001, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 3.9102902374670183, | |
| "grad_norm": 0.0020255844574421644, | |
| "learning_rate": 1.700181409774353e-06, | |
| "loss": 0.0, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 3.9182058047493404, | |
| "grad_norm": 0.0408753827214241, | |
| "learning_rate": 1.6888473437211134e-06, | |
| "loss": 0.0001, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 3.926121372031662, | |
| "grad_norm": 0.0009621501667425036, | |
| "learning_rate": 1.6775318811324365e-06, | |
| "loss": 0.0, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 3.9340369393139842, | |
| "grad_norm": 0.00070030870847404, | |
| "learning_rate": 1.666235281523968e-06, | |
| "loss": 0.0, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 3.941952506596306, | |
| "grad_norm": 0.0018594212597236037, | |
| "learning_rate": 1.6549578039787436e-06, | |
| "loss": 0.0, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 3.949868073878628, | |
| "grad_norm": 0.0013388278894126415, | |
| "learning_rate": 1.643699707141237e-06, | |
| "loss": 0.0, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 3.9577836411609497, | |
| "grad_norm": 0.0010811896063387394, | |
| "learning_rate": 1.6324612492114378e-06, | |
| "loss": 0.0, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 3.965699208443272, | |
| "grad_norm": 0.001241902238689363, | |
| "learning_rate": 1.6212426879389205e-06, | |
| "loss": 0.0, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 3.9736147757255935, | |
| "grad_norm": 0.0013246425660327077, | |
| "learning_rate": 1.6100442806169423e-06, | |
| "loss": 0.0, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 3.9815303430079156, | |
| "grad_norm": 0.0019326942274346948, | |
| "learning_rate": 1.5988662840765323e-06, | |
| "loss": 0.0, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 3.9894459102902373, | |
| "grad_norm": 0.004760990850627422, | |
| "learning_rate": 1.5877089546806123e-06, | |
| "loss": 0.0, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 3.9973614775725594, | |
| "grad_norm": 0.0012298509245738387, | |
| "learning_rate": 1.5765725483181056e-06, | |
| "loss": 0.0, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 4.0, | |
| "grad_norm": 0.0012298509245738387, | |
| "learning_rate": 1.5654573203980782e-06, | |
| "loss": 0.0, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 4.007915567282322, | |
| "grad_norm": 0.0017885937122628093, | |
| "learning_rate": 1.5543635258438746e-06, | |
| "loss": 0.0, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 4.015831134564644, | |
| "grad_norm": 0.0006545423530042171, | |
| "learning_rate": 1.5432914190872757e-06, | |
| "loss": 0.0, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 4.0237467018469655, | |
| "grad_norm": 0.0009681995725259185, | |
| "learning_rate": 1.5322412540626593e-06, | |
| "loss": 0.0, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 4.031662269129288, | |
| "grad_norm": 0.0007468878757208586, | |
| "learning_rate": 1.5212132842011778e-06, | |
| "loss": 0.0, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 4.03957783641161, | |
| "grad_norm": 0.0014883131952956319, | |
| "learning_rate": 1.5102077624249498e-06, | |
| "loss": 0.0, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 4.047493403693931, | |
| "grad_norm": 0.01504430454224348, | |
| "learning_rate": 1.4992249411412514e-06, | |
| "loss": 0.0, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 4.055408970976253, | |
| "grad_norm": 0.0009686205303296447, | |
| "learning_rate": 1.4882650722367365e-06, | |
| "loss": 0.0, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 4.063324538258575, | |
| "grad_norm": 0.002122383564710617, | |
| "learning_rate": 1.4773284070716504e-06, | |
| "loss": 0.0, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 4.071240105540897, | |
| "grad_norm": 0.0020355628803372383, | |
| "learning_rate": 1.4664151964740754e-06, | |
| "loss": 0.0, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 4.0791556728232194, | |
| "grad_norm": 0.0009121248149313033, | |
| "learning_rate": 1.4555256907341668e-06, | |
| "loss": 0.0, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 4.087071240105541, | |
| "grad_norm": 0.0007165389833971858, | |
| "learning_rate": 1.4446601395984233e-06, | |
| "loss": 0.0, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 4.094986807387863, | |
| "grad_norm": 0.0012207115069031715, | |
| "learning_rate": 1.4338187922639506e-06, | |
| "loss": 0.0, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 4.102902374670185, | |
| "grad_norm": 0.0010161465033888817, | |
| "learning_rate": 1.4230018973727535e-06, | |
| "loss": 0.0, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 4.110817941952506, | |
| "grad_norm": 0.0013537887716665864, | |
| "learning_rate": 1.412209703006025e-06, | |
| "loss": 0.0, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 4.118733509234828, | |
| "grad_norm": 0.0006420905701816082, | |
| "learning_rate": 1.4014424566784663e-06, | |
| "loss": 0.0, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 4.12664907651715, | |
| "grad_norm": 0.0018021538853645325, | |
| "learning_rate": 1.3907004053326006e-06, | |
| "loss": 0.0, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 4.1345646437994725, | |
| "grad_norm": 0.0007370368111878633, | |
| "learning_rate": 1.3799837953331191e-06, | |
| "loss": 0.0, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 4.142480211081795, | |
| "grad_norm": 0.0012738363584503531, | |
| "learning_rate": 1.3692928724612204e-06, | |
| "loss": 0.0, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 4.150395778364116, | |
| "grad_norm": 0.0009869966888800263, | |
| "learning_rate": 1.3586278819089837e-06, | |
| "loss": 0.0, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 4.158311345646438, | |
| "grad_norm": 0.0008315503946505487, | |
| "learning_rate": 1.347989068273738e-06, | |
| "loss": 0.0, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 4.16622691292876, | |
| "grad_norm": 0.0016615035710856318, | |
| "learning_rate": 1.3373766755524564e-06, | |
| "loss": 0.0, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 4.174142480211081, | |
| "grad_norm": 0.000622150837443769, | |
| "learning_rate": 1.3267909471361574e-06, | |
| "loss": 0.0, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 4.1820580474934035, | |
| "grad_norm": 0.00513397017493844, | |
| "learning_rate": 1.3162321258043261e-06, | |
| "loss": 0.0, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 4.189973614775726, | |
| "grad_norm": 0.0007064202800393105, | |
| "learning_rate": 1.3057004537193424e-06, | |
| "loss": 0.0, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 4.197889182058048, | |
| "grad_norm": 0.0007592473411932588, | |
| "learning_rate": 1.2951961724209317e-06, | |
| "loss": 0.0, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 4.205804749340369, | |
| "grad_norm": 0.0009417333058081567, | |
| "learning_rate": 1.284719522820618e-06, | |
| "loss": 0.0, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 4.213720316622691, | |
| "grad_norm": 0.0006435943651013076, | |
| "learning_rate": 1.274270745196209e-06, | |
| "loss": 0.0, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 4.221635883905013, | |
| "grad_norm": 0.0020621689036488533, | |
| "learning_rate": 1.263850079186274e-06, | |
| "loss": 0.0, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 4.229551451187335, | |
| "grad_norm": 0.0010798608418554068, | |
| "learning_rate": 1.253457763784659e-06, | |
| "loss": 0.0, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 4.237467018469657, | |
| "grad_norm": 0.0010206660954281688, | |
| "learning_rate": 1.2430940373349944e-06, | |
| "loss": 0.0, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 4.245382585751979, | |
| "grad_norm": 0.010580914095044136, | |
| "learning_rate": 1.2327591375252404e-06, | |
| "loss": 0.0, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 4.253298153034301, | |
| "grad_norm": 0.0010622036643326283, | |
| "learning_rate": 1.2224533013822237e-06, | |
| "loss": 0.0, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 4.261213720316623, | |
| "grad_norm": 0.0011575209209695458, | |
| "learning_rate": 1.2121767652662122e-06, | |
| "loss": 0.0, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 4.269129287598945, | |
| "grad_norm": 0.0011635952396318316, | |
| "learning_rate": 1.2019297648654856e-06, | |
| "loss": 0.0, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 4.277044854881266, | |
| "grad_norm": 0.001950603211298585, | |
| "learning_rate": 1.191712535190937e-06, | |
| "loss": 0.0, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 4.284960422163588, | |
| "grad_norm": 0.0012599063338711858, | |
| "learning_rate": 1.181525310570677e-06, | |
| "loss": 0.0, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 4.2928759894459105, | |
| "grad_norm": 0.0011667741928249598, | |
| "learning_rate": 1.1713683246446622e-06, | |
| "loss": 0.0, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 4.300791556728232, | |
| "grad_norm": 0.15335267782211304, | |
| "learning_rate": 1.16124181035934e-06, | |
| "loss": 0.0004, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 4.308707124010554, | |
| "grad_norm": 0.003990268334746361, | |
| "learning_rate": 1.1511459999622982e-06, | |
| "loss": 0.0, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 4.316622691292876, | |
| "grad_norm": 0.0007785047055222094, | |
| "learning_rate": 1.1410811249969475e-06, | |
| "loss": 0.0, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 4.324538258575198, | |
| "grad_norm": 0.00136755860876292, | |
| "learning_rate": 1.1310474162972026e-06, | |
| "loss": 0.0, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 4.33245382585752, | |
| "grad_norm": 0.0006973014096729457, | |
| "learning_rate": 1.1210451039821965e-06, | |
| "loss": 0.0, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 4.3403693931398415, | |
| "grad_norm": 0.0012636622413992882, | |
| "learning_rate": 1.1110744174509952e-06, | |
| "loss": 0.0, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 4.348284960422164, | |
| "grad_norm": 0.0014565171441063285, | |
| "learning_rate": 1.10113558537734e-06, | |
| "loss": 0.0, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 4.356200527704486, | |
| "grad_norm": 0.0013900294434279203, | |
| "learning_rate": 1.0912288357044063e-06, | |
| "loss": 0.0001, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 4.364116094986807, | |
| "grad_norm": 0.04637804627418518, | |
| "learning_rate": 1.0813543956395675e-06, | |
| "loss": 0.0, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 4.372031662269129, | |
| "grad_norm": 0.001281993929296732, | |
| "learning_rate": 1.0715124916491937e-06, | |
| "loss": 0.0, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 4.379947229551451, | |
| "grad_norm": 0.0009187161922454834, | |
| "learning_rate": 1.0617033494534486e-06, | |
| "loss": 0.0, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 4.387862796833773, | |
| "grad_norm": 0.0020851644221693277, | |
| "learning_rate": 1.0519271940211214e-06, | |
| "loss": 0.0, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 4.395778364116095, | |
| "grad_norm": 0.0010528352577239275, | |
| "learning_rate": 1.0421842495644588e-06, | |
| "loss": 0.0, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 4.403693931398417, | |
| "grad_norm": 0.0048421346582472324, | |
| "learning_rate": 1.032474739534031e-06, | |
| "loss": 0.0, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 4.411609498680739, | |
| "grad_norm": 0.0012176426826044917, | |
| "learning_rate": 1.0227988866135995e-06, | |
| "loss": 0.0, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 4.419525065963061, | |
| "grad_norm": 0.001685833791270852, | |
| "learning_rate": 1.0131569127150143e-06, | |
| "loss": 0.0, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 4.427440633245382, | |
| "grad_norm": 0.0013779419241473079, | |
| "learning_rate": 1.0035490389731257e-06, | |
| "loss": 0.0, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 4.435356200527704, | |
| "grad_norm": 0.00110618956387043, | |
| "learning_rate": 9.939754857407064e-07, | |
| "loss": 0.0, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 4.443271767810026, | |
| "grad_norm": 0.001307065598666668, | |
| "learning_rate": 9.844364725834058e-07, | |
| "loss": 0.0, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 4.4511873350923485, | |
| "grad_norm": 0.000743210781365633, | |
| "learning_rate": 9.749322182747074e-07, | |
| "loss": 0.0, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 4.45910290237467, | |
| "grad_norm": 0.0010771660599857569, | |
| "learning_rate": 9.654629407909163e-07, | |
| "loss": 0.0, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 4.467018469656992, | |
| "grad_norm": 0.002285173861309886, | |
| "learning_rate": 9.560288573061562e-07, | |
| "loss": 0.0, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 4.474934036939314, | |
| "grad_norm": 0.0006983516504988074, | |
| "learning_rate": 9.466301841873929e-07, | |
| "loss": 0.0, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 4.482849604221636, | |
| "grad_norm": 0.0008840264054015279, | |
| "learning_rate": 9.372671369894662e-07, | |
| "loss": 0.0, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 4.490765171503957, | |
| "grad_norm": 0.0015082739992067218, | |
| "learning_rate": 9.279399304501526e-07, | |
| "loss": 0.0, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 4.4986807387862795, | |
| "grad_norm": 0.0005543252336792648, | |
| "learning_rate": 9.186487784852349e-07, | |
| "loss": 0.0, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 4.506596306068602, | |
| "grad_norm": 0.0008211376261897385, | |
| "learning_rate": 9.093938941836012e-07, | |
| "loss": 0.0, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 4.514511873350924, | |
| "grad_norm": 0.0009113789419643581, | |
| "learning_rate": 9.001754898023512e-07, | |
| "loss": 0.0, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 4.522427440633246, | |
| "grad_norm": 0.0009940057061612606, | |
| "learning_rate": 8.909937767619369e-07, | |
| "loss": 0.0, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 4.530343007915567, | |
| "grad_norm": 0.0010979474755004048, | |
| "learning_rate": 8.818489656413042e-07, | |
| "loss": 0.0, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 4.538258575197889, | |
| "grad_norm": 0.0011025301646441221, | |
| "learning_rate": 8.727412661730724e-07, | |
| "loss": 0.0, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 4.546174142480211, | |
| "grad_norm": 0.0005905535072088242, | |
| "learning_rate": 8.636708872387162e-07, | |
| "loss": 0.0, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 4.554089709762533, | |
| "grad_norm": 0.0008613502723164856, | |
| "learning_rate": 8.546380368637813e-07, | |
| "loss": 0.0, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 4.562005277044855, | |
| "grad_norm": 0.0017304850043728948, | |
| "learning_rate": 8.456429222131083e-07, | |
| "loss": 0.0, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 4.569920844327177, | |
| "grad_norm": 0.0012761703692376614, | |
| "learning_rate": 8.366857495860869e-07, | |
| "loss": 0.0, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 4.577836411609499, | |
| "grad_norm": 0.0012462437152862549, | |
| "learning_rate": 8.277667244119186e-07, | |
| "loss": 0.0, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 4.585751978891821, | |
| "grad_norm": 0.0016653670463711023, | |
| "learning_rate": 8.188860512449107e-07, | |
| "loss": 0.0, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 4.593667546174142, | |
| "grad_norm": 0.001680879620835185, | |
| "learning_rate": 8.100439337597799e-07, | |
| "loss": 0.0, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 4.601583113456464, | |
| "grad_norm": 0.0027815212961286306, | |
| "learning_rate": 8.012405747469861e-07, | |
| "loss": 0.0, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 4.6094986807387865, | |
| "grad_norm": 0.0005703975912183523, | |
| "learning_rate": 7.924761761080769e-07, | |
| "loss": 0.0, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 4.617414248021108, | |
| "grad_norm": 0.002361526945605874, | |
| "learning_rate": 7.837509388510611e-07, | |
| "loss": 0.0, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 4.62532981530343, | |
| "grad_norm": 0.0720648542046547, | |
| "learning_rate": 7.750650630857948e-07, | |
| "loss": 0.0001, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 4.633245382585752, | |
| "grad_norm": 0.0045934757217764854, | |
| "learning_rate": 7.66418748019396e-07, | |
| "loss": 0.0002, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 4.641160949868074, | |
| "grad_norm": 0.1026143878698349, | |
| "learning_rate": 7.578121919516712e-07, | |
| "loss": 0.0, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 4.649076517150396, | |
| "grad_norm": 0.0007057770853862166, | |
| "learning_rate": 7.492455922705727e-07, | |
| "loss": 0.0, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 4.6569920844327175, | |
| "grad_norm": 0.003926987759768963, | |
| "learning_rate": 7.407191454476667e-07, | |
| "loss": 0.0, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 4.66490765171504, | |
| "grad_norm": 0.011395251378417015, | |
| "learning_rate": 7.322330470336314e-07, | |
| "loss": 0.0, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 4.672823218997362, | |
| "grad_norm": 0.0008481740369461477, | |
| "learning_rate": 7.23787491653769e-07, | |
| "loss": 0.0, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 4.680738786279683, | |
| "grad_norm": 0.0006975684082135558, | |
| "learning_rate": 7.153826730035424e-07, | |
| "loss": 0.0, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 4.688654353562005, | |
| "grad_norm": 0.0006951978430151939, | |
| "learning_rate": 7.07018783844137e-07, | |
| "loss": 0.0, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 4.696569920844327, | |
| "grad_norm": 0.001249202759936452, | |
| "learning_rate": 6.986960159980327e-07, | |
| "loss": 0.0, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 4.704485488126649, | |
| "grad_norm": 0.0005832965252920985, | |
| "learning_rate": 6.904145603446116e-07, | |
| "loss": 0.0, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 4.7124010554089715, | |
| "grad_norm": 0.0016783899627625942, | |
| "learning_rate": 6.821746068157742e-07, | |
| "loss": 0.0, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 4.720316622691293, | |
| "grad_norm": 0.0008286430966109037, | |
| "learning_rate": 6.739763443915894e-07, | |
| "loss": 0.0, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 4.728232189973615, | |
| "grad_norm": 0.0010895330924540758, | |
| "learning_rate": 6.658199610959537e-07, | |
| "loss": 0.0, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 4.736147757255937, | |
| "grad_norm": 0.0011518665123730898, | |
| "learning_rate": 6.577056439922858e-07, | |
| "loss": 0.0, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 4.744063324538258, | |
| "grad_norm": 0.0006547801312990487, | |
| "learning_rate": 6.496335791792294e-07, | |
| "loss": 0.0, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 4.75197889182058, | |
| "grad_norm": 0.0016805746126919985, | |
| "learning_rate": 6.41603951786392e-07, | |
| "loss": 0.0, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 4.759894459102902, | |
| "grad_norm": 0.0005475433426909149, | |
| "learning_rate": 6.336169459700933e-07, | |
| "loss": 0.0, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 4.7678100263852246, | |
| "grad_norm": 0.002318611601367593, | |
| "learning_rate": 6.25672744909146e-07, | |
| "loss": 0.0, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 4.775725593667546, | |
| "grad_norm": 0.0010817993897944689, | |
| "learning_rate": 6.177715308006505e-07, | |
| "loss": 0.0, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 4.783641160949868, | |
| "grad_norm": 0.0008523711585439742, | |
| "learning_rate": 6.099134848558208e-07, | |
| "loss": 0.0, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 4.79155672823219, | |
| "grad_norm": 0.0021770992316305637, | |
| "learning_rate": 6.020987872958237e-07, | |
| "loss": 0.0, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 4.799472295514512, | |
| "grad_norm": 0.000966981693636626, | |
| "learning_rate": 5.943276173476509e-07, | |
| "loss": 0.0, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 4.807387862796833, | |
| "grad_norm": 0.0008673115517012775, | |
| "learning_rate": 5.866001532400023e-07, | |
| "loss": 0.0, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 4.8153034300791555, | |
| "grad_norm": 0.005670213606208563, | |
| "learning_rate": 5.789165721992052e-07, | |
| "loss": 0.0, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 4.823218997361478, | |
| "grad_norm": 0.0012314619962126017, | |
| "learning_rate": 5.712770504451426e-07, | |
| "loss": 0.0, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 4.8311345646438, | |
| "grad_norm": 0.0005614141700789332, | |
| "learning_rate": 5.636817631872185e-07, | |
| "loss": 0.0, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 4.839050131926121, | |
| "grad_norm": 0.0005666129291057587, | |
| "learning_rate": 5.561308846203333e-07, | |
| "loss": 0.0, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 4.846965699208443, | |
| "grad_norm": 0.0006194331217557192, | |
| "learning_rate": 5.486245879208946e-07, | |
| "loss": 0.0, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 4.854881266490765, | |
| "grad_norm": 0.001858290983363986, | |
| "learning_rate": 5.411630452428396e-07, | |
| "loss": 0.0, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 4.862796833773087, | |
| "grad_norm": 0.001191300223581493, | |
| "learning_rate": 5.337464277136925e-07, | |
| "loss": 0.0, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 4.870712401055409, | |
| "grad_norm": 0.0004950195434503257, | |
| "learning_rate": 5.263749054306347e-07, | |
| "loss": 0.0, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 4.878627968337731, | |
| "grad_norm": 0.0005656416760757565, | |
| "learning_rate": 5.190486474566084e-07, | |
| "loss": 0.0, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 4.886543535620053, | |
| "grad_norm": 0.0006986209191381931, | |
| "learning_rate": 5.117678218164337e-07, | |
| "loss": 0.0, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 4.894459102902375, | |
| "grad_norm": 0.0009121168404817581, | |
| "learning_rate": 5.045325954929614e-07, | |
| "loss": 0.0, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 4.902374670184696, | |
| "grad_norm": 0.0010522707598283887, | |
| "learning_rate": 4.973431344232377e-07, | |
| "loss": 0.0, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 4.910290237467018, | |
| "grad_norm": 0.001362331211566925, | |
| "learning_rate": 4.901996034947026e-07, | |
| "loss": 0.0, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 4.91820580474934, | |
| "grad_norm": 0.0011633161921054125, | |
| "learning_rate": 4.831021665414043e-07, | |
| "loss": 0.0, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 4.926121372031663, | |
| "grad_norm": 0.0009165364899672568, | |
| "learning_rate": 4.7605098634024684e-07, | |
| "loss": 0.0, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 4.934036939313984, | |
| "grad_norm": 0.0018295511836186051, | |
| "learning_rate": 4.6904622460725163e-07, | |
| "loss": 0.0, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 4.941952506596306, | |
| "grad_norm": 0.0016117612831294537, | |
| "learning_rate": 4.6208804199385114e-07, | |
| "loss": 0.0, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 4.949868073878628, | |
| "grad_norm": 0.0007937783957459033, | |
| "learning_rate": 4.55176598083206e-07, | |
| "loss": 0.0, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 4.95778364116095, | |
| "grad_norm": 0.0007202769047580659, | |
| "learning_rate": 4.4831205138654105e-07, | |
| "loss": 0.0001, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 4.965699208443271, | |
| "grad_norm": 0.05181964114308357, | |
| "learning_rate": 4.41494559339514e-07, | |
| "loss": 0.0, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 4.9736147757255935, | |
| "grad_norm": 0.001464763772673905, | |
| "learning_rate": 4.347242782986008e-07, | |
| "loss": 0.0, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 4.981530343007916, | |
| "grad_norm": 0.0013263854198157787, | |
| "learning_rate": 4.280013635375138e-07, | |
| "loss": 0.0, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 4.989445910290238, | |
| "grad_norm": 0.025266623124480247, | |
| "learning_rate": 4.2132596924363666e-07, | |
| "loss": 0.0, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 4.997361477572559, | |
| "grad_norm": 0.0006922471802681684, | |
| "learning_rate": 4.1469824851449207e-07, | |
| "loss": 0.0, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 5.0, | |
| "grad_norm": 0.0006922471802681684, | |
| "learning_rate": 4.081183533542263e-07, | |
| "loss": 0.0, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 5.007915567282322, | |
| "grad_norm": 0.005173252429813147, | |
| "learning_rate": 4.015864346701251e-07, | |
| "loss": 0.0, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 5.015831134564644, | |
| "grad_norm": 0.0011017130455002189, | |
| "learning_rate": 3.951026422691556e-07, | |
| "loss": 0.0, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 5.0237467018469655, | |
| "grad_norm": 0.0004939161008223891, | |
| "learning_rate": 3.886671248545243e-07, | |
| "loss": 0.0, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 5.031662269129288, | |
| "grad_norm": 0.0009218998602591455, | |
| "learning_rate": 3.822800300222726e-07, | |
| "loss": 0.0, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 5.03957783641161, | |
| "grad_norm": 0.003040511626750231, | |
| "learning_rate": 3.7594150425788677e-07, | |
| "loss": 0.0, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 5.047493403693931, | |
| "grad_norm": 0.0007424689829349518, | |
| "learning_rate": 3.6965169293294356e-07, | |
| "loss": 0.0, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 5.055408970976253, | |
| "grad_norm": 0.0007790547097101808, | |
| "learning_rate": 3.6341074030177114e-07, | |
| "loss": 0.0, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 5.063324538258575, | |
| "grad_norm": 0.0007425297517329454, | |
| "learning_rate": 3.5721878949814327e-07, | |
| "loss": 0.0, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 5.071240105540897, | |
| "grad_norm": 0.0014690719544887543, | |
| "learning_rate": 3.510759825319976e-07, | |
| "loss": 0.0, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 5.0791556728232194, | |
| "grad_norm": 0.0012174295261502266, | |
| "learning_rate": 3.4498246028617536e-07, | |
| "loss": 0.0, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 5.087071240105541, | |
| "grad_norm": 0.0009372502681799233, | |
| "learning_rate": 3.389383625131942e-07, | |
| "loss": 0.0, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 5.094986807387863, | |
| "grad_norm": 0.0024167634546756744, | |
| "learning_rate": 3.3294382783203906e-07, | |
| "loss": 0.0, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 5.102902374670185, | |
| "grad_norm": 0.0007574869669042528, | |
| "learning_rate": 3.2699899372498736e-07, | |
| "loss": 0.0, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 5.110817941952506, | |
| "grad_norm": 0.0014448463916778564, | |
| "learning_rate": 3.211039965344512e-07, | |
| "loss": 0.0, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 5.118733509234828, | |
| "grad_norm": 0.0006641830550506711, | |
| "learning_rate": 3.152589714598547e-07, | |
| "loss": 0.0, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 5.12664907651715, | |
| "grad_norm": 0.0009468268253840506, | |
| "learning_rate": 3.094640525545295e-07, | |
| "loss": 0.0, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 5.1345646437994725, | |
| "grad_norm": 0.0009490539086982608, | |
| "learning_rate": 3.0371937272264454e-07, | |
| "loss": 0.0, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 5.142480211081795, | |
| "grad_norm": 0.02954496629536152, | |
| "learning_rate": 2.980250637161525e-07, | |
| "loss": 0.0001, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 5.150395778364116, | |
| "grad_norm": 0.00169454887509346, | |
| "learning_rate": 2.9238125613177406e-07, | |
| "loss": 0.0, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 5.158311345646438, | |
| "grad_norm": 0.0008946408634074032, | |
| "learning_rate": 2.8678807940799746e-07, | |
| "loss": 0.0, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 5.16622691292876, | |
| "grad_norm": 0.000559095002245158, | |
| "learning_rate": 2.8124566182211434e-07, | |
| "loss": 0.0, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 5.174142480211081, | |
| "grad_norm": 0.0010668574832379818, | |
| "learning_rate": 2.7575413048727324e-07, | |
| "loss": 0.0, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 5.1820580474934035, | |
| "grad_norm": 0.000942812766879797, | |
| "learning_rate": 2.7031361134956913e-07, | |
| "loss": 0.0, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 5.189973614775726, | |
| "grad_norm": 0.0011439559748396277, | |
| "learning_rate": 2.649242291851503e-07, | |
| "loss": 0.0002, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 5.197889182058048, | |
| "grad_norm": 0.05758407711982727, | |
| "learning_rate": 2.5958610759736133e-07, | |
| "loss": 0.0, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 5.205804749340369, | |
| "grad_norm": 0.0009985921205952764, | |
| "learning_rate": 2.5429936901390286e-07, | |
| "loss": 0.0, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 5.213720316622691, | |
| "grad_norm": 0.0009654366876929998, | |
| "learning_rate": 2.490641346840292e-07, | |
| "loss": 0.0, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 5.221635883905013, | |
| "grad_norm": 0.0016246228478848934, | |
| "learning_rate": 2.438805246757631e-07, | |
| "loss": 0.0, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 5.229551451187335, | |
| "grad_norm": 0.0005871817120350897, | |
| "learning_rate": 2.38748657873146e-07, | |
| "loss": 0.0, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 5.237467018469657, | |
| "grad_norm": 0.002709415042772889, | |
| "learning_rate": 2.3366865197350736e-07, | |
| "loss": 0.0, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 5.245382585751979, | |
| "grad_norm": 0.0011816696496680379, | |
| "learning_rate": 2.2864062348476908e-07, | |
| "loss": 0.0, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 5.253298153034301, | |
| "grad_norm": 0.0010984864784404635, | |
| "learning_rate": 2.2366468772276995e-07, | |
| "loss": 0.0, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 5.261213720316623, | |
| "grad_norm": 0.0010330368531867862, | |
| "learning_rate": 2.1874095880862505e-07, | |
| "loss": 0.0, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 5.269129287598945, | |
| "grad_norm": 0.0011752013815566897, | |
| "learning_rate": 2.138695496661039e-07, | |
| "loss": 0.0, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 5.277044854881266, | |
| "grad_norm": 0.0007527798879891634, | |
| "learning_rate": 2.0905057201904444e-07, | |
| "loss": 0.0, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 5.284960422163588, | |
| "grad_norm": 0.0005292408750392497, | |
| "learning_rate": 2.0428413638878764e-07, | |
| "loss": 0.0, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 5.2928759894459105, | |
| "grad_norm": 0.0011747851967811584, | |
| "learning_rate": 1.9957035209164562e-07, | |
| "loss": 0.0, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 5.300791556728232, | |
| "grad_norm": 0.0007565742125734687, | |
| "learning_rate": 1.9490932723639166e-07, | |
| "loss": 0.0, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 5.308707124010554, | |
| "grad_norm": 0.001347986632026732, | |
| "learning_rate": 1.9030116872178317e-07, | |
| "loss": 0.0, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 5.316622691292876, | |
| "grad_norm": 0.0011456775246188045, | |
| "learning_rate": 1.8574598223410873e-07, | |
| "loss": 0.0, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 5.324538258575198, | |
| "grad_norm": 0.0005695118452422321, | |
| "learning_rate": 1.8124387224476347e-07, | |
| "loss": 0.0, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 5.33245382585752, | |
| "grad_norm": 0.0010689334012567997, | |
| "learning_rate": 1.76794942007856e-07, | |
| "loss": 0.0, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 5.3403693931398415, | |
| "grad_norm": 0.0007800321909599006, | |
| "learning_rate": 1.723992935578367e-07, | |
| "loss": 0.0, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 5.348284960422164, | |
| "grad_norm": 0.0005303187062963843, | |
| "learning_rate": 1.6805702770716054e-07, | |
| "loss": 0.0, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 5.356200527704486, | |
| "grad_norm": 0.0007535629556514323, | |
| "learning_rate": 1.6376824404397252e-07, | |
| "loss": 0.0, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 5.364116094986807, | |
| "grad_norm": 0.0009670981089584529, | |
| "learning_rate": 1.5953304092982625e-07, | |
| "loss": 0.0, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 5.372031662269129, | |
| "grad_norm": 0.0013728707563132048, | |
| "learning_rate": 1.5535151549742527e-07, | |
| "loss": 0.0, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 5.379947229551451, | |
| "grad_norm": 0.0009662070660851896, | |
| "learning_rate": 1.5122376364839818e-07, | |
| "loss": 0.0, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 5.387862796833773, | |
| "grad_norm": 0.0006931365351192653, | |
| "learning_rate": 1.471498800510962e-07, | |
| "loss": 0.0, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 5.395778364116095, | |
| "grad_norm": 0.0006757751107215881, | |
| "learning_rate": 1.431299581384249e-07, | |
| "loss": 0.0, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 5.403693931398417, | |
| "grad_norm": 0.002196143614128232, | |
| "learning_rate": 1.3916409010569925e-07, | |
| "loss": 0.0, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 5.411609498680739, | |
| "grad_norm": 0.004199303686618805, | |
| "learning_rate": 1.3525236690853093e-07, | |
| "loss": 0.0, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 5.419525065963061, | |
| "grad_norm": 0.0006531027611345053, | |
| "learning_rate": 1.3139487826073937e-07, | |
| "loss": 0.0, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 5.427440633245382, | |
| "grad_norm": 0.0013951828004792333, | |
| "learning_rate": 1.2759171263229814e-07, | |
| "loss": 0.0, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 5.435356200527704, | |
| "grad_norm": 0.002030656673014164, | |
| "learning_rate": 1.2384295724730266e-07, | |
| "loss": 0.0, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 5.443271767810026, | |
| "grad_norm": 0.0007671468774788082, | |
| "learning_rate": 1.201486980819716e-07, | |
| "loss": 0.0, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 5.4511873350923485, | |
| "grad_norm": 0.0006623528315685689, | |
| "learning_rate": 1.1650901986267365e-07, | |
| "loss": 0.0001, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 5.45910290237467, | |
| "grad_norm": 0.027323342859745026, | |
| "learning_rate": 1.1292400606398635e-07, | |
| "loss": 0.0, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 5.467018469656992, | |
| "grad_norm": 0.03837458789348602, | |
| "learning_rate": 1.0939373890677923e-07, | |
| "loss": 0.0001, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 5.474934036939314, | |
| "grad_norm": 0.0006547007360495627, | |
| "learning_rate": 1.0591829935633041e-07, | |
| "loss": 0.0, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 5.482849604221636, | |
| "grad_norm": 0.0017067458247765899, | |
| "learning_rate": 1.0249776712046745e-07, | |
| "loss": 0.0, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 5.490765171503957, | |
| "grad_norm": 0.0008273804560303688, | |
| "learning_rate": 9.913222064774159e-08, | |
| "loss": 0.0, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 5.4986807387862795, | |
| "grad_norm": 0.0014368380652740598, | |
| "learning_rate": 9.58217371256262e-08, | |
| "loss": 0.0, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 5.506596306068602, | |
| "grad_norm": 0.0013807985233142972, | |
| "learning_rate": 9.256639247874872e-08, | |
| "loss": 0.0, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 5.514511873350924, | |
| "grad_norm": 0.000646974251139909, | |
| "learning_rate": 8.936626136714754e-08, | |
| "loss": 0.0, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 5.522427440633246, | |
| "grad_norm": 0.0009724789415486157, | |
| "learning_rate": 8.622141718456128e-08, | |
| "loss": 0.0, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 5.530343007915567, | |
| "grad_norm": 0.0005859123775735497, | |
| "learning_rate": 8.313193205674391e-08, | |
| "loss": 0.0, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 5.538258575197889, | |
| "grad_norm": 0.0008421401726081967, | |
| "learning_rate": 8.009787683981279e-08, | |
| "loss": 0.0, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 5.546174142480211, | |
| "grad_norm": 0.0007554542389698327, | |
| "learning_rate": 7.711932111862024e-08, | |
| "loss": 0.0, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 5.554089709762533, | |
| "grad_norm": 0.0012514356058090925, | |
| "learning_rate": 7.419633320516178e-08, | |
| "loss": 0.0, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 5.562005277044855, | |
| "grad_norm": 0.0006436012336052954, | |
| "learning_rate": 7.13289801370054e-08, | |
| "loss": 0.0, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 5.569920844327177, | |
| "grad_norm": 0.0008593889651820064, | |
| "learning_rate": 6.851732767575752e-08, | |
| "loss": 0.0, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 5.577836411609499, | |
| "grad_norm": 0.0008925219299271703, | |
| "learning_rate": 6.576144030555259e-08, | |
| "loss": 0.0, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 5.585751978891821, | |
| "grad_norm": 0.0005661491304636002, | |
| "learning_rate": 6.30613812315739e-08, | |
| "loss": 0.0, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 5.593667546174142, | |
| "grad_norm": 0.0011487139854580164, | |
| "learning_rate": 6.041721237860677e-08, | |
| "loss": 0.0, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 5.601583113456464, | |
| "grad_norm": 0.003276489209383726, | |
| "learning_rate": 5.7828994389614866e-08, | |
| "loss": 0.0, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 5.6094986807387865, | |
| "grad_norm": 0.001058993162587285, | |
| "learning_rate": 5.529678662435228e-08, | |
| "loss": 0.0, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 5.617414248021108, | |
| "grad_norm": 0.0013507738476619124, | |
| "learning_rate": 5.282064715799895e-08, | |
| "loss": 0.0, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 5.62532981530343, | |
| "grad_norm": 0.0010608804877847433, | |
| "learning_rate": 5.040063277983287e-08, | |
| "loss": 0.0, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 5.633245382585752, | |
| "grad_norm": 0.003138932166621089, | |
| "learning_rate": 4.8036798991923925e-08, | |
| "loss": 0.0, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 5.641160949868074, | |
| "grad_norm": 0.0009938941802829504, | |
| "learning_rate": 4.5729200007862686e-08, | |
| "loss": 0.0, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 5.649076517150396, | |
| "grad_norm": 0.0007328402134589851, | |
| "learning_rate": 4.34778887515172e-08, | |
| "loss": 0.0, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 5.6569920844327175, | |
| "grad_norm": 0.00361031387001276, | |
| "learning_rate": 4.128291685581792e-08, | |
| "loss": 0.0, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 5.66490765171504, | |
| "grad_norm": 0.05315789207816124, | |
| "learning_rate": 3.914433466157608e-08, | |
| "loss": 0.0001, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 5.672823218997362, | |
| "grad_norm": 0.0006082054460421205, | |
| "learning_rate": 3.70621912163252e-08, | |
| "loss": 0.0, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 5.680738786279683, | |
| "grad_norm": 0.0013073241570964456, | |
| "learning_rate": 3.503653427320036e-08, | |
| "loss": 0.0, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 5.688654353562005, | |
| "grad_norm": 0.001491551985964179, | |
| "learning_rate": 3.3067410289840115e-08, | |
| "loss": 0.0, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 5.696569920844327, | |
| "grad_norm": 0.0005928871687501669, | |
| "learning_rate": 3.115486442732268e-08, | |
| "loss": 0.0, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 5.704485488126649, | |
| "grad_norm": 0.0020740528125315905, | |
| "learning_rate": 2.9298940549128962e-08, | |
| "loss": 0.0, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 5.7124010554089715, | |
| "grad_norm": 0.0016710623167455196, | |
| "learning_rate": 2.7499681220136685e-08, | |
| "loss": 0.0, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 5.720316622691293, | |
| "grad_norm": 0.0006300493841990829, | |
| "learning_rate": 2.5757127705645924e-08, | |
| "loss": 0.0, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 5.728232189973615, | |
| "grad_norm": 0.0006173982401378453, | |
| "learning_rate": 2.4071319970430385e-08, | |
| "loss": 0.0, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 5.736147757255937, | |
| "grad_norm": 0.0008473024354316294, | |
| "learning_rate": 2.244229667782205e-08, | |
| "loss": 0.0, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 5.744063324538258, | |
| "grad_norm": 0.0009805875597521663, | |
| "learning_rate": 2.0870095188824103e-08, | |
| "loss": 0.0, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 5.75197889182058, | |
| "grad_norm": 0.000995925860479474, | |
| "learning_rate": 1.9354751561254937e-08, | |
| "loss": 0.0, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 5.759894459102902, | |
| "grad_norm": 0.002532000420615077, | |
| "learning_rate": 1.789630054891883e-08, | |
| "loss": 0.0, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 5.7678100263852246, | |
| "grad_norm": 0.0007621280383318663, | |
| "learning_rate": 1.6494775600812418e-08, | |
| "loss": 0.0, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 5.775725593667546, | |
| "grad_norm": 0.000711779051925987, | |
| "learning_rate": 1.5150208860354175e-08, | |
| "loss": 0.0, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 5.783641160949868, | |
| "grad_norm": 0.0008426412241533399, | |
| "learning_rate": 1.3862631164649477e-08, | |
| "loss": 0.0, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 5.79155672823219, | |
| "grad_norm": 0.001399231143295765, | |
| "learning_rate": 1.2632072043782251e-08, | |
| "loss": 0.0, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 5.799472295514512, | |
| "grad_norm": 0.0010957148624584079, | |
| "learning_rate": 1.1458559720137762e-08, | |
| "loss": 0.0, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 5.807387862796833, | |
| "grad_norm": 0.0024443091824650764, | |
| "learning_rate": 1.0342121107755898e-08, | |
| "loss": 0.0, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 5.8153034300791555, | |
| "grad_norm": 0.000554680940695107, | |
| "learning_rate": 9.282781811714159e-09, | |
| "loss": 0.0, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 5.823218997361478, | |
| "grad_norm": 0.0025695902295410633, | |
| "learning_rate": 8.280566127538691e-09, | |
| "loss": 0.0, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 5.8311345646438, | |
| "grad_norm": 0.0008379130740649998, | |
| "learning_rate": 7.335497040648898e-09, | |
| "loss": 0.0, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 5.839050131926121, | |
| "grad_norm": 0.0008131481008604169, | |
| "learning_rate": 6.4475962258297994e-09, | |
| "loss": 0.0, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 5.846965699208443, | |
| "grad_norm": 0.000910057220607996, | |
| "learning_rate": 5.616884046734383e-09, | |
| "loss": 0.0, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 5.854881266490765, | |
| "grad_norm": 0.0007016519666649401, | |
| "learning_rate": 4.843379555417304e-09, | |
| "loss": 0.0, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 5.862796833773087, | |
| "grad_norm": 0.0006324381101876497, | |
| "learning_rate": 4.1271004918971845e-09, | |
| "loss": 0.0, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 5.870712401055409, | |
| "grad_norm": 0.0005984574090689421, | |
| "learning_rate": 3.468063283750267e-09, | |
| "loss": 0.0, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 5.878627968337731, | |
| "grad_norm": 0.0007798295700922608, | |
| "learning_rate": 2.866283045734053e-09, | |
| "loss": 0.0, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 5.886543535620053, | |
| "grad_norm": 0.0009426471078768373, | |
| "learning_rate": 2.321773579439246e-09, | |
| "loss": 0.0, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 5.894459102902375, | |
| "grad_norm": 0.0007083627278916538, | |
| "learning_rate": 1.834547372975004e-09, | |
| "loss": 0.0, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 5.902374670184696, | |
| "grad_norm": 0.0004903904045931995, | |
| "learning_rate": 1.4046156006808365e-09, | |
| "loss": 0.0, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 5.910290237467018, | |
| "grad_norm": 0.0010145300766453147, | |
| "learning_rate": 1.03198812287153e-09, | |
| "loss": 0.0, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 5.91820580474934, | |
| "grad_norm": 0.0005329736159183085, | |
| "learning_rate": 7.166734856103863e-10, | |
| "loss": 0.0, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 5.926121372031663, | |
| "grad_norm": 0.000705968588590622, | |
| "learning_rate": 4.586789205140996e-10, | |
| "loss": 0.0, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 5.934036939313984, | |
| "grad_norm": 0.0006150389090180397, | |
| "learning_rate": 2.58010344585391e-10, | |
| "loss": 0.0, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 5.941952506596306, | |
| "grad_norm": 0.02292468585073948, | |
| "learning_rate": 1.1467236007867144e-10, | |
| "loss": 0.0, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 5.949868073878628, | |
| "grad_norm": 0.00048279500333592296, | |
| "learning_rate": 2.8668254393460216e-11, | |
| "loss": 0.0, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 5.95778364116095, | |
| "grad_norm": 0.0013129240833222866, | |
| "learning_rate": 0.0, | |
| "loss": 0.0, | |
| "step": 756 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 756, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 6, | |
| "save_steps": 126, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 1.9012636204282675e+18, | |
| "train_batch_size": 4, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
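
The structure above is the `trainer_state.json` that the Hugging Face `transformers` Trainer writes into each checkpoint directory: a top-level summary (`epoch`, `global_step`, `max_steps`, `total_flos`) plus a `log_history` array with one record per logging step (`logging_steps: 1` here). Below is a minimal sketch of how such a file can be loaded and summarized; it assumes the JSON shown above is saved locally as `trainer_state.json` (the Trainer's standard filename — adjust the path for your own run), and it relies only on the keys visible in this log.

```python
# Minimal sketch: load a Hugging Face trainer_state.json and summarize
# the run. Assumes the file sits in the working directory under the
# Trainer's default name; change the path for your checkpoint layout.
import json

with open("trainer_state.json") as f:
    state = json.load(f)

history = state["log_history"]
print(f"logged records : {len(history)}")
print(f"final epoch    : {state['epoch']}")
print(f"global step    : {state['global_step']} / {state['max_steps']}")

# Training records carry a "loss" key; evaluation records (if any) do not.
train_rows = [r for r in history if "loss" in r]
first, last = train_rows[0], train_rows[-1]
print(f"initial loss   : {first['loss']:.4f} (step {first['step']})")
print(f"final loss     : {last['loss']:.4f} (step {last['step']})")
print(f"final lr       : {last['learning_rate']}")
```

For this particular log, the sketch would report 756 training records, a loss that falls from roughly 2.17 at step 1 to 0.0 by the final steps, and a learning rate annealed to exactly 0.0 at step 756 — consistent with a warmup-then-decay schedule running to completion (`should_training_stop: true`).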