{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.9894459102902373,
  "eval_steps": 500,
  "global_step": 252,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0079155672823219,
      "grad_norm": 34.582191467285156,
      "learning_rate": 5.0000000000000004e-08,
      "loss": 2.1743,
      "step": 1
    },
    {
      "epoch": 0.0158311345646438,
      "grad_norm": 35.2754020690918,
      "learning_rate": 1.0000000000000001e-07,
      "loss": 2.195,
      "step": 2
    },
    {
      "epoch": 0.023746701846965697,
      "grad_norm": 35.12391662597656,
      "learning_rate": 1.5000000000000002e-07,
      "loss": 2.1971,
      "step": 3
    },
    {
      "epoch": 0.0316622691292876,
      "grad_norm": 34.45759201049805,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 2.1701,
      "step": 4
    },
    {
      "epoch": 0.0395778364116095,
      "grad_norm": 35.35009002685547,
      "learning_rate": 2.5000000000000004e-07,
      "loss": 2.2099,
      "step": 5
    },
    {
      "epoch": 0.047493403693931395,
      "grad_norm": 35.039520263671875,
      "learning_rate": 3.0000000000000004e-07,
      "loss": 2.2169,
      "step": 6
    },
    {
      "epoch": 0.055408970976253295,
      "grad_norm": 35.103248596191406,
      "learning_rate": 3.5000000000000004e-07,
      "loss": 2.2121,
      "step": 7
    },
    {
      "epoch": 0.0633245382585752,
      "grad_norm": 34.65024185180664,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 2.1715,
      "step": 8
    },
    {
      "epoch": 0.0712401055408971,
      "grad_norm": 35.055023193359375,
      "learning_rate": 4.5000000000000003e-07,
      "loss": 2.1571,
      "step": 9
    },
    {
      "epoch": 0.079155672823219,
      "grad_norm": 34.82114028930664,
      "learning_rate": 5.000000000000001e-07,
      "loss": 2.1428,
      "step": 10
    },
    {
      "epoch": 0.0870712401055409,
      "grad_norm": 33.663883209228516,
      "learning_rate": 5.5e-07,
      "loss": 2.0656,
      "step": 11
    },
    {
      "epoch": 0.09498680738786279,
      "grad_norm": 33.614967346191406,
      "learning_rate": 6.000000000000001e-07,
      "loss": 2.05,
      "step": 12
    },
    {
      "epoch": 0.10290237467018469,
      "grad_norm": 32.66334915161133,
      "learning_rate": 6.5e-07,
      "loss": 1.9905,
      "step": 13
    },
    {
      "epoch": 0.11081794195250659,
      "grad_norm": 31.83934211730957,
      "learning_rate": 7.000000000000001e-07,
      "loss": 1.8981,
      "step": 14
    },
    {
      "epoch": 0.11873350923482849,
      "grad_norm": 32.067840576171875,
      "learning_rate": 7.5e-07,
      "loss": 1.8803,
      "step": 15
    },
    {
      "epoch": 0.1266490765171504,
      "grad_norm": 31.775592803955078,
      "learning_rate": 8.000000000000001e-07,
      "loss": 1.8215,
      "step": 16
    },
    {
      "epoch": 0.1345646437994723,
      "grad_norm": 30.205060958862305,
      "learning_rate": 8.500000000000001e-07,
      "loss": 1.6654,
      "step": 17
    },
    {
      "epoch": 0.1424802110817942,
      "grad_norm": 30.699304580688477,
      "learning_rate": 9.000000000000001e-07,
      "loss": 1.6149,
      "step": 18
    },
    {
      "epoch": 0.1503957783641161,
      "grad_norm": 30.16368293762207,
      "learning_rate": 9.500000000000001e-07,
      "loss": 1.4905,
      "step": 19
    },
    {
      "epoch": 0.158311345646438,
      "grad_norm": 30.227079391479492,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 1.3845,
      "step": 20
    },
    {
      "epoch": 0.1662269129287599,
      "grad_norm": 30.42757797241211,
      "learning_rate": 1.0500000000000001e-06,
      "loss": 1.2462,
      "step": 21
    },
    {
      "epoch": 0.1741424802110818,
      "grad_norm": 30.44344711303711,
      "learning_rate": 1.1e-06,
      "loss": 1.0948,
      "step": 22
    },
    {
      "epoch": 0.1820580474934037,
      "grad_norm": 30.735376358032227,
      "learning_rate": 1.1500000000000002e-06,
      "loss": 0.9618,
      "step": 23
    },
    {
      "epoch": 0.18997361477572558,
      "grad_norm": 30.048675537109375,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 0.7738,
      "step": 24
    },
    {
      "epoch": 0.19788918205804748,
      "grad_norm": 30.161460876464844,
      "learning_rate": 1.25e-06,
      "loss": 0.6424,
      "step": 25
    },
    {
      "epoch": 0.20580474934036938,
      "grad_norm": 28.648540496826172,
      "learning_rate": 1.3e-06,
      "loss": 0.4602,
      "step": 26
    },
    {
      "epoch": 0.21372031662269128,
      "grad_norm": 25.193084716796875,
      "learning_rate": 1.3500000000000002e-06,
      "loss": 0.3309,
      "step": 27
    },
    {
      "epoch": 0.22163588390501318,
      "grad_norm": 18.437116622924805,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 0.2206,
      "step": 28
    },
    {
      "epoch": 0.22955145118733508,
      "grad_norm": 12.642715454101562,
      "learning_rate": 1.45e-06,
      "loss": 0.139,
      "step": 29
    },
    {
      "epoch": 0.23746701846965698,
      "grad_norm": 6.1605305671691895,
      "learning_rate": 1.5e-06,
      "loss": 0.0982,
      "step": 30
    },
    {
      "epoch": 0.24538258575197888,
      "grad_norm": 2.986103057861328,
      "learning_rate": 1.5500000000000002e-06,
      "loss": 0.0707,
      "step": 31
    },
    {
      "epoch": 0.2532981530343008,
      "grad_norm": 2.030493974685669,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 0.0637,
      "step": 32
    },
    {
      "epoch": 0.2612137203166227,
      "grad_norm": 1.5127642154693604,
      "learning_rate": 1.6500000000000003e-06,
      "loss": 0.0616,
      "step": 33
    },
    {
      "epoch": 0.2691292875989446,
      "grad_norm": 1.8087722063064575,
      "learning_rate": 1.7000000000000002e-06,
      "loss": 0.0555,
      "step": 34
    },
    {
      "epoch": 0.2770448548812665,
      "grad_norm": 1.3533340692520142,
      "learning_rate": 1.75e-06,
      "loss": 0.0517,
      "step": 35
    },
    {
      "epoch": 0.2849604221635884,
      "grad_norm": 1.3595997095108032,
      "learning_rate": 1.8000000000000001e-06,
      "loss": 0.0537,
      "step": 36
    },
    {
      "epoch": 0.2928759894459103,
      "grad_norm": 1.2778421640396118,
      "learning_rate": 1.85e-06,
      "loss": 0.0483,
      "step": 37
    },
    {
      "epoch": 0.3007915567282322,
      "grad_norm": 1.0834269523620605,
      "learning_rate": 1.9000000000000002e-06,
      "loss": 0.0471,
      "step": 38
    },
    {
      "epoch": 0.3087071240105541,
      "grad_norm": 1.175179362297058,
      "learning_rate": 1.9500000000000004e-06,
      "loss": 0.0528,
      "step": 39
    },
    {
      "epoch": 0.316622691292876,
      "grad_norm": 0.9915143847465515,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.0525,
      "step": 40
    },
    {
      "epoch": 0.3245382585751979,
      "grad_norm": 0.7996736168861389,
      "learning_rate": 2.05e-06,
      "loss": 0.0479,
      "step": 41
    },
    {
      "epoch": 0.3324538258575198,
      "grad_norm": 0.7375659942626953,
      "learning_rate": 2.1000000000000002e-06,
      "loss": 0.0509,
      "step": 42
    },
    {
      "epoch": 0.3403693931398417,
      "grad_norm": 0.7712483406066895,
      "learning_rate": 2.15e-06,
      "loss": 0.0484,
      "step": 43
    },
    {
      "epoch": 0.3482849604221636,
      "grad_norm": 0.9321597218513489,
      "learning_rate": 2.2e-06,
      "loss": 0.0448,
      "step": 44
    },
    {
      "epoch": 0.3562005277044855,
      "grad_norm": 1.02247953414917,
      "learning_rate": 2.25e-06,
      "loss": 0.0484,
      "step": 45
    },
    {
      "epoch": 0.3641160949868074,
      "grad_norm": 0.857718825340271,
      "learning_rate": 2.3000000000000004e-06,
      "loss": 0.0425,
      "step": 46
    },
    {
      "epoch": 0.3720316622691293,
      "grad_norm": 1.0721040964126587,
      "learning_rate": 2.35e-06,
      "loss": 0.0472,
      "step": 47
    },
    {
      "epoch": 0.37994722955145116,
      "grad_norm": 0.811705470085144,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 0.039,
      "step": 48
    },
    {
      "epoch": 0.38786279683377306,
      "grad_norm": 0.9680790901184082,
      "learning_rate": 2.4500000000000003e-06,
      "loss": 0.0423,
      "step": 49
    },
    {
      "epoch": 0.39577836411609496,
      "grad_norm": 0.9470660090446472,
      "learning_rate": 2.5e-06,
      "loss": 0.0411,
      "step": 50
    },
    {
      "epoch": 0.40369393139841686,
      "grad_norm": 0.8540447950363159,
      "learning_rate": 2.55e-06,
      "loss": 0.049,
      "step": 51
    },
    {
      "epoch": 0.41160949868073876,
      "grad_norm": 1.39769446849823,
      "learning_rate": 2.6e-06,
      "loss": 0.0478,
      "step": 52
    },
    {
      "epoch": 0.41952506596306066,
      "grad_norm": 0.8914775848388672,
      "learning_rate": 2.6500000000000005e-06,
      "loss": 0.0415,
      "step": 53
    },
    {
      "epoch": 0.42744063324538256,
      "grad_norm": 0.759353518486023,
      "learning_rate": 2.7000000000000004e-06,
      "loss": 0.046,
      "step": 54
    },
    {
      "epoch": 0.43535620052770446,
      "grad_norm": 0.8632487058639526,
      "learning_rate": 2.7500000000000004e-06,
      "loss": 0.0435,
      "step": 55
    },
    {
      "epoch": 0.44327176781002636,
      "grad_norm": 0.6662724018096924,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 0.0496,
      "step": 56
    },
    {
      "epoch": 0.45118733509234826,
      "grad_norm": 0.8853585124015808,
      "learning_rate": 2.85e-06,
      "loss": 0.0384,
      "step": 57
    },
    {
      "epoch": 0.45910290237467016,
      "grad_norm": 0.7003194093704224,
      "learning_rate": 2.9e-06,
      "loss": 0.0446,
      "step": 58
    },
    {
      "epoch": 0.46701846965699206,
      "grad_norm": 0.8177786469459534,
      "learning_rate": 2.95e-06,
      "loss": 0.0417,
      "step": 59
    },
    {
      "epoch": 0.47493403693931396,
      "grad_norm": 0.9565925598144531,
      "learning_rate": 3e-06,
      "loss": 0.044,
      "step": 60
    },
    {
      "epoch": 0.48284960422163586,
      "grad_norm": 0.7401254177093506,
      "learning_rate": 3.05e-06,
      "loss": 0.0439,
      "step": 61
    },
    {
      "epoch": 0.49076517150395776,
      "grad_norm": 0.9952861666679382,
      "learning_rate": 3.1000000000000004e-06,
      "loss": 0.0371,
      "step": 62
    },
    {
      "epoch": 0.49868073878627966,
      "grad_norm": 0.9622153639793396,
      "learning_rate": 3.1500000000000003e-06,
      "loss": 0.0457,
      "step": 63
    },
    {
      "epoch": 0.5065963060686016,
      "grad_norm": 0.7078511118888855,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 0.0396,
      "step": 64
    },
    {
      "epoch": 0.5145118733509235,
      "grad_norm": 0.9370516538619995,
      "learning_rate": 3.2500000000000002e-06,
      "loss": 0.0421,
      "step": 65
    },
    {
      "epoch": 0.5224274406332454,
      "grad_norm": 0.6465123891830444,
      "learning_rate": 3.3000000000000006e-06,
      "loss": 0.0341,
      "step": 66
    },
    {
      "epoch": 0.5303430079155673,
      "grad_norm": 0.7659086585044861,
      "learning_rate": 3.3500000000000005e-06,
      "loss": 0.0429,
      "step": 67
    },
    {
      "epoch": 0.5382585751978892,
      "grad_norm": 0.7772212028503418,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 0.036,
      "step": 68
    },
    {
      "epoch": 0.5461741424802111,
      "grad_norm": 0.7115844488143921,
      "learning_rate": 3.45e-06,
      "loss": 0.0327,
      "step": 69
    },
    {
      "epoch": 0.554089709762533,
      "grad_norm": 0.6542766690254211,
      "learning_rate": 3.5e-06,
      "loss": 0.0362,
      "step": 70
    },
    {
      "epoch": 0.5620052770448549,
      "grad_norm": 0.7394366264343262,
      "learning_rate": 3.5500000000000003e-06,
      "loss": 0.0426,
      "step": 71
    },
    {
      "epoch": 0.5699208443271768,
      "grad_norm": 0.8163363337516785,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 0.0344,
      "step": 72
    },
    {
      "epoch": 0.5778364116094987,
      "grad_norm": 0.7115806937217712,
      "learning_rate": 3.65e-06,
      "loss": 0.0379,
      "step": 73
    },
    {
      "epoch": 0.5857519788918206,
      "grad_norm": 0.6192121505737305,
      "learning_rate": 3.7e-06,
      "loss": 0.0276,
      "step": 74
    },
    {
      "epoch": 0.5936675461741425,
      "grad_norm": 0.822684645652771,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 0.0308,
      "step": 75
    },
    {
      "epoch": 0.6015831134564644,
      "grad_norm": 0.8095663189888,
      "learning_rate": 3.8000000000000005e-06,
      "loss": 0.0303,
      "step": 76
    },
    {
      "epoch": 0.6094986807387863,
      "grad_norm": 0.7700952887535095,
      "learning_rate": 3.85e-06,
      "loss": 0.0328,
      "step": 77
    },
    {
      "epoch": 0.6174142480211082,
      "grad_norm": 0.7777529358863831,
      "learning_rate": 3.900000000000001e-06,
      "loss": 0.0344,
      "step": 78
    },
    {
      "epoch": 0.6253298153034301,
      "grad_norm": 0.826386034488678,
      "learning_rate": 3.95e-06,
      "loss": 0.033,
      "step": 79
    },
    {
      "epoch": 0.633245382585752,
      "grad_norm": 0.788942277431488,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.0331,
      "step": 80
    },
    {
      "epoch": 0.6411609498680739,
      "grad_norm": 0.7351260185241699,
      "learning_rate": 4.05e-06,
      "loss": 0.0346,
      "step": 81
    },
    {
      "epoch": 0.6490765171503958,
      "grad_norm": 0.5737812519073486,
      "learning_rate": 4.1e-06,
      "loss": 0.027,
      "step": 82
    },
    {
      "epoch": 0.6569920844327177,
      "grad_norm": 0.7053394913673401,
      "learning_rate": 4.15e-06,
      "loss": 0.034,
      "step": 83
    },
    {
      "epoch": 0.6649076517150396,
      "grad_norm": 0.7508617639541626,
      "learning_rate": 4.2000000000000004e-06,
      "loss": 0.0323,
      "step": 84
    },
    {
      "epoch": 0.6728232189973615,
      "grad_norm": 1.0235975980758667,
      "learning_rate": 4.25e-06,
      "loss": 0.0395,
      "step": 85
    },
    {
      "epoch": 0.6807387862796834,
      "grad_norm": 0.6522803902626038,
      "learning_rate": 4.3e-06,
      "loss": 0.0262,
      "step": 86
    },
    {
      "epoch": 0.6886543535620053,
      "grad_norm": 1.182160496711731,
      "learning_rate": 4.350000000000001e-06,
      "loss": 0.0387,
      "step": 87
    },
    {
      "epoch": 0.6965699208443272,
      "grad_norm": 0.6571248769760132,
      "learning_rate": 4.4e-06,
      "loss": 0.0262,
      "step": 88
    },
    {
      "epoch": 0.7044854881266491,
      "grad_norm": 0.6641517877578735,
      "learning_rate": 4.450000000000001e-06,
      "loss": 0.0265,
      "step": 89
    },
    {
      "epoch": 0.712401055408971,
      "grad_norm": 0.8751306533813477,
      "learning_rate": 4.5e-06,
      "loss": 0.0307,
      "step": 90
    },
    {
      "epoch": 0.7203166226912929,
      "grad_norm": 0.7627044320106506,
      "learning_rate": 4.5500000000000005e-06,
      "loss": 0.0332,
      "step": 91
    },
    {
      "epoch": 0.7282321899736148,
      "grad_norm": 0.7124688625335693,
      "learning_rate": 4.600000000000001e-06,
      "loss": 0.0299,
      "step": 92
    },
    {
      "epoch": 0.7361477572559367,
      "grad_norm": 0.7688237428665161,
      "learning_rate": 4.65e-06,
      "loss": 0.0258,
      "step": 93
    },
    {
      "epoch": 0.7440633245382586,
      "grad_norm": 0.8663469552993774,
      "learning_rate": 4.7e-06,
      "loss": 0.0342,
      "step": 94
    },
    {
      "epoch": 0.7519788918205804,
      "grad_norm": 0.6536213755607605,
      "learning_rate": 4.75e-06,
      "loss": 0.0226,
      "step": 95
    },
    {
      "epoch": 0.7598944591029023,
      "grad_norm": 0.5616201758384705,
      "learning_rate": 4.800000000000001e-06,
      "loss": 0.022,
      "step": 96
    },
    {
      "epoch": 0.7678100263852242,
      "grad_norm": 0.7985079884529114,
      "learning_rate": 4.85e-06,
      "loss": 0.0305,
      "step": 97
    },
    {
      "epoch": 0.7757255936675461,
      "grad_norm": 0.726844847202301,
      "learning_rate": 4.9000000000000005e-06,
      "loss": 0.0243,
      "step": 98
    },
    {
      "epoch": 0.783641160949868,
      "grad_norm": 0.656874418258667,
      "learning_rate": 4.95e-06,
      "loss": 0.0243,
      "step": 99
    },
    {
      "epoch": 0.7915567282321899,
      "grad_norm": 0.8744286894798279,
      "learning_rate": 5e-06,
      "loss": 0.0261,
      "step": 100
    },
    {
      "epoch": 0.7994722955145118,
      "grad_norm": 1.0138187408447266,
      "learning_rate": 4.9999713317456065e-06,
      "loss": 0.0254,
      "step": 101
    },
    {
      "epoch": 0.8073878627968337,
      "grad_norm": 0.6964263319969177,
      "learning_rate": 4.9998853276399215e-06,
      "loss": 0.0263,
      "step": 102
    },
    {
      "epoch": 0.8153034300791556,
      "grad_norm": 0.6374551057815552,
      "learning_rate": 4.999741989655415e-06,
      "loss": 0.0228,
      "step": 103
    },
    {
      "epoch": 0.8232189973614775,
      "grad_norm": 0.6274930834770203,
      "learning_rate": 4.999541321079486e-06,
      "loss": 0.0286,
      "step": 104
    },
    {
      "epoch": 0.8311345646437994,
      "grad_norm": 0.6344143748283386,
      "learning_rate": 4.99928332651439e-06,
      "loss": 0.0258,
      "step": 105
    },
    {
      "epoch": 0.8390501319261213,
      "grad_norm": 0.8304360508918762,
      "learning_rate": 4.998968011877129e-06,
      "loss": 0.022,
      "step": 106
    },
    {
      "epoch": 0.8469656992084432,
      "grad_norm": 0.6463897228240967,
      "learning_rate": 4.998595384399319e-06,
      "loss": 0.0184,
      "step": 107
    },
    {
      "epoch": 0.8548812664907651,
      "grad_norm": 0.5647016763687134,
      "learning_rate": 4.998165452627025e-06,
      "loss": 0.0221,
      "step": 108
    },
    {
      "epoch": 0.862796833773087,
      "grad_norm": 0.5621638298034668,
      "learning_rate": 4.997678226420561e-06,
      "loss": 0.022,
      "step": 109
    },
    {
      "epoch": 0.8707124010554089,
      "grad_norm": 0.8295672535896301,
      "learning_rate": 4.997133716954266e-06,
      "loss": 0.0206,
      "step": 110
    },
    {
      "epoch": 0.8786279683377308,
      "grad_norm": 0.8057272434234619,
      "learning_rate": 4.99653193671625e-06,
      "loss": 0.0204,
      "step": 111
    },
    {
      "epoch": 0.8865435356200527,
      "grad_norm": 0.6956275105476379,
      "learning_rate": 4.995872899508103e-06,
      "loss": 0.0186,
      "step": 112
    },
    {
      "epoch": 0.8944591029023746,
      "grad_norm": 0.7873161435127258,
      "learning_rate": 4.995156620444584e-06,
      "loss": 0.0149,
      "step": 113
    },
    {
      "epoch": 0.9023746701846965,
      "grad_norm": 0.9187519550323486,
      "learning_rate": 4.994383115953266e-06,
      "loss": 0.0183,
      "step": 114
    },
    {
      "epoch": 0.9102902374670184,
      "grad_norm": 0.5987403392791748,
      "learning_rate": 4.9935524037741705e-06,
      "loss": 0.0158,
      "step": 115
    },
    {
      "epoch": 0.9182058047493403,
      "grad_norm": 0.6011790037155151,
      "learning_rate": 4.992664502959351e-06,
      "loss": 0.016,
      "step": 116
    },
    {
      "epoch": 0.9261213720316622,
      "grad_norm": 0.8429304957389832,
      "learning_rate": 4.991719433872461e-06,
      "loss": 0.0107,
      "step": 117
    },
    {
      "epoch": 0.9340369393139841,
      "grad_norm": 0.8618797659873962,
      "learning_rate": 4.990717218188286e-06,
      "loss": 0.0162,
      "step": 118
    },
    {
      "epoch": 0.941952506596306,
      "grad_norm": 1.1907824277877808,
      "learning_rate": 4.989657878892245e-06,
      "loss": 0.0173,
      "step": 119
    },
    {
      "epoch": 0.9498680738786279,
      "grad_norm": 0.7317540645599365,
      "learning_rate": 4.988541440279862e-06,
      "loss": 0.0183,
      "step": 120
    },
    {
      "epoch": 0.9577836411609498,
      "grad_norm": 0.6827302575111389,
      "learning_rate": 4.987367927956218e-06,
      "loss": 0.0192,
      "step": 121
    },
    {
      "epoch": 0.9656992084432717,
      "grad_norm": 0.6512569189071655,
      "learning_rate": 4.986137368835351e-06,
      "loss": 0.0169,
      "step": 122
    },
    {
      "epoch": 0.9736147757255936,
      "grad_norm": 0.5439648032188416,
      "learning_rate": 4.984849791139647e-06,
      "loss": 0.0136,
      "step": 123
    },
    {
      "epoch": 0.9815303430079155,
      "grad_norm": 0.5652760863304138,
      "learning_rate": 4.983505224399188e-06,
      "loss": 0.0171,
      "step": 124
    },
    {
      "epoch": 0.9894459102902374,
      "grad_norm": 0.6217207908630371,
      "learning_rate": 4.9821036994510816e-06,
      "loss": 0.0175,
      "step": 125
    },
    {
      "epoch": 0.9973614775725593,
      "grad_norm": 0.5073563456535339,
      "learning_rate": 4.980645248438746e-06,
      "loss": 0.0166,
      "step": 126
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.5073563456535339,
      "learning_rate": 4.979129904811177e-06,
      "loss": 0.0103,
      "step": 127
    },
    {
      "epoch": 1.007915567282322,
      "grad_norm": 0.9930127263069153,
      "learning_rate": 4.977557703322178e-06,
      "loss": 0.0116,
      "step": 128
    },
    {
      "epoch": 1.0158311345646438,
      "grad_norm": 0.4781687259674072,
      "learning_rate": 4.975928680029571e-06,
      "loss": 0.0097,
      "step": 129
    },
    {
      "epoch": 1.0237467018469657,
      "grad_norm": 0.5260384678840637,
      "learning_rate": 4.974242872294354e-06,
      "loss": 0.0131,
      "step": 130
    },
    {
      "epoch": 1.0316622691292876,
      "grad_norm": 0.6748254895210266,
      "learning_rate": 4.972500318779864e-06,
      "loss": 0.0093,
      "step": 131
    },
    {
      "epoch": 1.0395778364116095,
      "grad_norm": 0.42848217487335205,
      "learning_rate": 4.970701059450872e-06,
      "loss": 0.0086,
      "step": 132
    },
    {
      "epoch": 1.0474934036939314,
      "grad_norm": 0.8815901279449463,
      "learning_rate": 4.968845135572678e-06,
      "loss": 0.0103,
      "step": 133
    },
    {
      "epoch": 1.0554089709762533,
      "grad_norm": 0.5253162980079651,
      "learning_rate": 4.96693258971016e-06,
      "loss": 0.0088,
      "step": 134
    },
    {
      "epoch": 1.0633245382585752,
      "grad_norm": 0.6259243488311768,
      "learning_rate": 4.9649634657268e-06,
      "loss": 0.0074,
      "step": 135
    },
    {
      "epoch": 1.071240105540897,
      "grad_norm": 1.0937525033950806,
      "learning_rate": 4.962937808783675e-06,
      "loss": 0.0115,
      "step": 136
    },
    {
      "epoch": 1.079155672823219,
      "grad_norm": 1.0612727403640747,
      "learning_rate": 4.960855665338424e-06,
      "loss": 0.009,
      "step": 137
    },
    {
      "epoch": 1.087071240105541,
      "grad_norm": 0.9188615679740906,
      "learning_rate": 4.958717083144182e-06,
      "loss": 0.0127,
      "step": 138
    },
    {
      "epoch": 1.0949868073878628,
      "grad_norm": 0.6099897623062134,
      "learning_rate": 4.956522111248483e-06,
      "loss": 0.0088,
      "step": 139
    },
    {
      "epoch": 1.1029023746701847,
      "grad_norm": 0.6967251896858215,
      "learning_rate": 4.954270799992138e-06,
      "loss": 0.0088,
      "step": 140
    },
    {
      "epoch": 1.1108179419525066,
      "grad_norm": 0.6113755702972412,
      "learning_rate": 4.9519632010080765e-06,
      "loss": 0.0079,
      "step": 141
    },
    {
      "epoch": 1.1187335092348285,
      "grad_norm": 0.6708074808120728,
      "learning_rate": 4.9495993672201675e-06,
      "loss": 0.0093,
      "step": 142
    },
    {
      "epoch": 1.1266490765171504,
      "grad_norm": 0.425584614276886,
      "learning_rate": 4.947179352842001e-06,
      "loss": 0.0093,
      "step": 143
    },
    {
      "epoch": 1.1345646437994723,
      "grad_norm": 0.3952641785144806,
      "learning_rate": 4.944703213375648e-06,
      "loss": 0.0057,
      "step": 144
    },
    {
      "epoch": 1.1424802110817942,
      "grad_norm": 0.39029237627983093,
      "learning_rate": 4.942171005610385e-06,
      "loss": 0.0106,
      "step": 145
    },
    {
      "epoch": 1.150395778364116,
      "grad_norm": 0.5346894264221191,
      "learning_rate": 4.939582787621394e-06,
      "loss": 0.0062,
      "step": 146
    },
    {
      "epoch": 1.158311345646438,
      "grad_norm": 0.48500752449035645,
      "learning_rate": 4.936938618768427e-06,
      "loss": 0.0077,
      "step": 147
    },
    {
      "epoch": 1.16622691292876,
      "grad_norm": 0.5172581076622009,
      "learning_rate": 4.934238559694448e-06,
      "loss": 0.0122,
      "step": 148
    },
    {
      "epoch": 1.1741424802110818,
      "grad_norm": 0.571660578250885,
      "learning_rate": 4.9314826723242425e-06,
      "loss": 0.0107,
      "step": 149
    },
    {
      "epoch": 1.1820580474934037,
      "grad_norm": 0.558224081993103,
      "learning_rate": 4.928671019862995e-06,
      "loss": 0.0062,
      "step": 150
    },
    {
      "epoch": 1.1899736147757256,
      "grad_norm": 0.46116045117378235,
      "learning_rate": 4.925803666794839e-06,
      "loss": 0.0089,
      "step": 151
    },
    {
      "epoch": 1.1978891820580475,
      "grad_norm": 0.5193414092063904,
      "learning_rate": 4.92288067888138e-06,
      "loss": 0.0084,
      "step": 152
    },
    {
      "epoch": 1.2058047493403694,
      "grad_norm": 0.6195410490036011,
      "learning_rate": 4.919902123160187e-06,
      "loss": 0.0115,
      "step": 153
    },
    {
      "epoch": 1.2137203166226913,
      "grad_norm": 0.5878890752792358,
      "learning_rate": 4.9168680679432565e-06,
      "loss": 0.0075,
      "step": 154
    },
    {
      "epoch": 1.2216358839050132,
      "grad_norm": 0.3049341142177582,
      "learning_rate": 4.913778582815439e-06,
      "loss": 0.0071,
      "step": 155
    },
    {
      "epoch": 1.229551451187335,
      "grad_norm": 0.9924205541610718,
      "learning_rate": 4.9106337386328524e-06,
      "loss": 0.0121,
      "step": 156
    },
    {
      "epoch": 1.237467018469657,
      "grad_norm": 0.44824329018592834,
      "learning_rate": 4.907433607521252e-06,
      "loss": 0.0066,
      "step": 157
    },
    {
      "epoch": 1.245382585751979,
      "grad_norm": 0.7608261108398438,
      "learning_rate": 4.904178262874374e-06,
      "loss": 0.0046,
      "step": 158
    },
    {
      "epoch": 1.2532981530343008,
      "grad_norm": 0.4237998425960541,
      "learning_rate": 4.900867779352258e-06,
      "loss": 0.0032,
      "step": 159
    },
    {
      "epoch": 1.2612137203166227,
      "grad_norm": 0.9337185025215149,
      "learning_rate": 4.897502232879533e-06,
      "loss": 0.0061,
      "step": 160
    },
    {
      "epoch": 1.2691292875989446,
      "grad_norm": 0.6228697299957275,
      "learning_rate": 4.89408170064367e-06,
      "loss": 0.0057,
      "step": 161
    },
    {
      "epoch": 1.2770448548812665,
      "grad_norm": 0.41712895035743713,
      "learning_rate": 4.890606261093221e-06,
      "loss": 0.0046,
      "step": 162
    },
    {
      "epoch": 1.2849604221635884,
      "grad_norm": 0.6179379820823669,
      "learning_rate": 4.887075993936014e-06,
      "loss": 0.0045,
      "step": 163
    },
    {
      "epoch": 1.2928759894459103,
      "grad_norm": 0.3739337921142578,
      "learning_rate": 4.883490980137327e-06,
      "loss": 0.0061,
      "step": 164
    },
    {
      "epoch": 1.3007915567282322,
      "grad_norm": 0.5171535015106201,
      "learning_rate": 4.8798513019180295e-06,
      "loss": 0.0073,
      "step": 165
    },
    {
      "epoch": 1.3087071240105541,
      "grad_norm": 0.640513002872467,
      "learning_rate": 4.876157042752698e-06,
      "loss": 0.0043,
      "step": 166
    },
    {
      "epoch": 1.316622691292876,
      "grad_norm": 0.4904448390007019,
      "learning_rate": 4.872408287367702e-06,
      "loss": 0.0044,
      "step": 167
    },
    {
      "epoch": 1.324538258575198,
      "grad_norm": 0.4022831618785858,
      "learning_rate": 4.868605121739261e-06,
      "loss": 0.0019,
      "step": 168
    },
    {
      "epoch": 1.3324538258575198,
      "grad_norm": 0.35701292753219604,
      "learning_rate": 4.86474763309147e-06,
      "loss": 0.0048,
      "step": 169
    },
    {
      "epoch": 1.3403693931398417,
      "grad_norm": 1.2709388732910156,
      "learning_rate": 4.8608359098943014e-06,
      "loss": 0.0073,
      "step": 170
    },
    {
      "epoch": 1.3482849604221636,
      "grad_norm": 1.1490532159805298,
      "learning_rate": 4.856870041861576e-06,
      "loss": 0.0082,
      "step": 171
    },
    {
      "epoch": 1.3562005277044855,
      "grad_norm": 1.9450149536132812,
      "learning_rate": 4.8528501199489045e-06,
      "loss": 0.0047,
      "step": 172
    },
    {
      "epoch": 1.3641160949868074,
      "grad_norm": 0.2517525553703308,
      "learning_rate": 4.848776236351602e-06,
      "loss": 0.0101,
      "step": 173
    },
    {
      "epoch": 1.3720316622691293,
      "grad_norm": 0.612678050994873,
      "learning_rate": 4.8446484845025754e-06,
      "loss": 0.0064,
      "step": 174
    },
    {
      "epoch": 1.3799472295514512,
      "grad_norm": 0.8020515441894531,
      "learning_rate": 4.840466959070174e-06,
      "loss": 0.0077,
      "step": 175
    },
    {
      "epoch": 1.3878627968337731,
      "grad_norm": 0.46455827355384827,
      "learning_rate": 4.836231755956028e-06,
      "loss": 0.0034,
      "step": 176
    },
    {
      "epoch": 1.395778364116095,
      "grad_norm": 0.4173589050769806,
      "learning_rate": 4.83194297229284e-06,
      "loss": 0.0065,
      "step": 177
    },
    {
      "epoch": 1.403693931398417,
      "grad_norm": 0.37849071621894836,
      "learning_rate": 4.827600706442164e-06,
      "loss": 0.0062,
      "step": 178
    },
    {
      "epoch": 1.4116094986807388,
      "grad_norm": 0.6507642269134521,
      "learning_rate": 4.823205057992145e-06,
      "loss": 0.0054,
      "step": 179
    },
    {
      "epoch": 1.4195250659630607,
      "grad_norm": 0.41411933302879333,
      "learning_rate": 4.8187561277552376e-06,
      "loss": 0.0056,
      "step": 180
    },
    {
      "epoch": 1.4274406332453826,
      "grad_norm": 0.2742733359336853,
      "learning_rate": 4.8142540177658925e-06,
      "loss": 0.0022,
      "step": 181
    },
    {
      "epoch": 1.4353562005277045,
      "grad_norm": 0.4650178849697113,
      "learning_rate": 4.809698831278217e-06,
      "loss": 0.0054,
      "step": 182
    },
    {
      "epoch": 1.4432717678100264,
      "grad_norm": 0.2831101715564728,
      "learning_rate": 4.805090672763609e-06,
      "loss": 0.0041,
      "step": 183
    },
    {
      "epoch": 1.4511873350923483,
      "grad_norm": 0.4332253634929657,
      "learning_rate": 4.800429647908354e-06,
      "loss": 0.0024,
      "step": 184
    },
    {
      "epoch": 1.4591029023746702,
      "grad_norm": 0.13565762341022491,
      "learning_rate": 4.795715863611212e-06,
      "loss": 0.0025,
      "step": 185
    },
    {
      "epoch": 1.4670184696569921,
      "grad_norm": 0.36338773369789124,
      "learning_rate": 4.790949427980956e-06,
      "loss": 0.0006,
      "step": 186
    },
    {
      "epoch": 1.474934036939314,
      "grad_norm": 0.6935271620750427,
      "learning_rate": 4.786130450333897e-06,
      "loss": 0.0034,
      "step": 187
    },
    {
      "epoch": 1.482849604221636,
      "grad_norm": 0.7220961451530457,
      "learning_rate": 4.7812590411913755e-06,
      "loss": 0.0041,
      "step": 188
    },
    {
      "epoch": 1.4907651715039578,
      "grad_norm": 0.26949548721313477,
      "learning_rate": 4.77633531227723e-06,
      "loss": 0.0025,
      "step": 189
    },
    {
      "epoch": 1.4986807387862797,
      "grad_norm": 0.6303775906562805,
      "learning_rate": 4.771359376515231e-06,
      "loss": 0.005,
      "step": 190
    },
    {
      "epoch": 1.5065963060686016,
      "grad_norm": 0.7352619171142578,
      "learning_rate": 4.766331348026493e-06,
      "loss": 0.0061,
      "step": 191
    },
    {
      "epoch": 1.5145118733509235,
      "grad_norm": 0.6918866038322449,
      "learning_rate": 4.7612513421268546e-06,
      "loss": 0.0056,
      "step": 192
    },
    {
      "epoch": 1.5224274406332454,
      "grad_norm": 1.1295803785324097,
      "learning_rate": 4.756119475324237e-06,
      "loss": 0.0042,
      "step": 193
    },
    {
      "epoch": 1.5303430079155673,
      "grad_norm": 0.4663296043872833,
      "learning_rate": 4.750935865315972e-06,
      "loss": 0.0026,
      "step": 194
    },
    {
      "epoch": 1.5382585751978892,
      "grad_norm": 0.19712182879447937,
      "learning_rate": 4.745700630986097e-06,
      "loss": 0.0012,
      "step": 195
    },
    {
      "epoch": 1.5461741424802111,
      "grad_norm": 0.322085440158844,
      "learning_rate": 4.740413892402639e-06,
      "loss": 0.0053,
      "step": 196
    },
    {
      "epoch": 1.554089709762533,
      "grad_norm": 0.4675672650337219,
      "learning_rate": 4.73507577081485e-06,
      "loss": 0.0043,
      "step": 197
    },
    {
      "epoch": 1.562005277044855,
      "grad_norm": 0.5347087979316711,
      "learning_rate": 4.7296863886504315e-06,
      "loss": 0.0063,
      "step": 198
    },
    {
      "epoch": 1.5699208443271768,
      "grad_norm": 0.48838165402412415,
      "learning_rate": 4.7242458695127275e-06,
      "loss": 0.0029,
      "step": 199
    },
    {
      "epoch": 1.5778364116094987,
      "grad_norm": 0.42908889055252075,
      "learning_rate": 4.718754338177887e-06,
      "loss": 0.0049,
      "step": 200
    },
    {
      "epoch": 1.5857519788918206,
      "grad_norm": 0.2672640085220337,
      "learning_rate": 4.713211920592003e-06,
      "loss": 0.0025,
      "step": 201
    },
    {
      "epoch": 1.5936675461741425,
      "grad_norm": 0.7370240092277527,
      "learning_rate": 4.707618743868226e-06,
      "loss": 0.0033,
      "step": 202
    },
    {
      "epoch": 1.6015831134564644,
      "grad_norm": 0.3037024736404419,
      "learning_rate": 4.701974936283848e-06,
      "loss": 0.0028,
      "step": 203
    },
    {
      "epoch": 1.6094986807387863,
      "grad_norm": 0.3099062442779541,
      "learning_rate": 4.696280627277356e-06,
      "loss": 0.0026,
      "step": 204
    },
    {
      "epoch": 1.6174142480211082,
      "grad_norm": 0.16460958123207092,
      "learning_rate": 4.690535947445471e-06,
      "loss": 0.006,
      "step": 205
    },
    {
      "epoch": 1.6253298153034301,
      "grad_norm": 0.43918171525001526,
      "learning_rate": 4.6847410285401465e-06,
      "loss": 0.0037,
      "step": 206
    },
    {
      "epoch": 1.633245382585752,
      "grad_norm": 0.2510988414287567,
      "learning_rate": 4.67889600346555e-06,
      "loss": 0.0014,
      "step": 207
    },
    {
      "epoch": 1.641160949868074,
      "grad_norm": 0.27360427379608154,
      "learning_rate": 4.673001006275013e-06,
      "loss": 0.0019,
      "step": 208
    },
    {
      "epoch": 1.6490765171503958,
      "grad_norm": 0.16950200498104095,
      "learning_rate": 4.667056172167962e-06,
      "loss": 0.001,
      "step": 209
    },
    {
      "epoch": 1.6569920844327177,
      "grad_norm": 0.27843964099884033,
      "learning_rate": 4.6610616374868066e-06,
      "loss": 0.0037,
      "step": 210
    },
    {
      "epoch": 1.6649076517150396,
      "grad_norm": 0.5085106492042542,
      "learning_rate": 4.655017539713826e-06,
      "loss": 0.0028,
      "step": 211
    },
    {
      "epoch": 1.6728232189973615,
      "grad_norm": 0.28183189034461975,
      "learning_rate": 4.648924017468003e-06,
      "loss": 0.0003,
      "step": 212
    },
    {
      "epoch": 1.6807387862796834,
      "grad_norm": 0.38587337732315063,
      "learning_rate": 4.642781210501858e-06,
      "loss": 0.0052,
      "step": 213
    },
    {
      "epoch": 1.6886543535620053,
      "grad_norm": 0.4533829987049103,
      "learning_rate": 4.6365892596982295e-06,
      "loss": 0.0036,
      "step": 214
    },
    {
      "epoch": 1.6965699208443272,
      "grad_norm": 0.9035217761993408,
      "learning_rate": 4.6303483070670574e-06,
      "loss": 0.0022,
      "step": 215
    },
    {
      "epoch": 1.7044854881266491,
      "grad_norm": 0.545513391494751,
      "learning_rate": 4.624058495742115e-06,
      "loss": 0.0019,
      "step": 216
    },
    {
      "epoch": 1.712401055408971,
      "grad_norm": 0.09330940991640091,
      "learning_rate": 4.617719969977729e-06,
      "loss": 0.0027,
      "step": 217
    },
    {
      "epoch": 1.720316622691293,
      "grad_norm": 0.4759463667869568,
      "learning_rate": 4.611332875145476e-06,
      "loss": 0.002,
      "step": 218
    },
    {
      "epoch": 1.7282321899736148,
      "grad_norm": 0.2812918722629547,
      "learning_rate": 4.604897357730846e-06,
      "loss": 0.0033,
      "step": 219
    },
    {
      "epoch": 1.7361477572559367,
      "grad_norm": 0.5131822228431702,
      "learning_rate": 4.598413565329876e-06,
      "loss": 0.0019,
      "step": 220
    },
    {
      "epoch": 1.7440633245382586,
      "grad_norm": 0.17534379661083221,
      "learning_rate": 4.591881646645775e-06,
      "loss": 0.0014,
      "step": 221
    },
    {
      "epoch": 1.7519788918205803,
      "grad_norm": 0.2808787524700165,
      "learning_rate": 4.585301751485508e-06,
      "loss": 0.0011,
      "step": 222
    },
    {
      "epoch": 1.7598944591029024,
      "grad_norm": 0.1664104461669922,
      "learning_rate": 4.578674030756364e-06,
      "loss": 0.0013,
      "step": 223
    },
    {
      "epoch": 1.767810026385224,
      "grad_norm": 0.11474815756082535,
      "learning_rate": 4.571998636462487e-06,
      "loss": 0.0003,
      "step": 224
    },
    {
      "epoch": 1.7757255936675462,
      "grad_norm": 0.2994535267353058,
      "learning_rate": 4.5652757217014e-06,
      "loss": 0.0043,
      "step": 225
    },
    {
      "epoch": 1.783641160949868,
      "grad_norm": 0.2962826192378998,
      "learning_rate": 4.5585054406604865e-06,
      "loss": 0.0012,
      "step": 226
    },
    {
      "epoch": 1.79155672823219,
      "grad_norm": 0.10045085847377777,
      "learning_rate": 4.551687948613459e-06,
      "loss": 0.0016,
      "step": 227
    },
    {
      "epoch": 1.7994722955145117,
      "grad_norm": 0.8167795538902283,
      "learning_rate": 4.544823401916794e-06,
      "loss": 0.0024,
      "step": 228
    },
    {
      "epoch": 1.8073878627968338,
      "grad_norm": 0.562492847442627,
      "learning_rate": 4.537911958006149e-06,
      "loss": 0.0029,
      "step": 229
    },
    {
      "epoch": 1.8153034300791555,
      "grad_norm": 0.12345612794160843,
      "learning_rate": 4.530953775392749e-06,
      "loss": 0.0006,
      "step": 230
    },
    {
      "epoch": 1.8232189973614776,
      "grad_norm": 0.1261640191078186,
      "learning_rate": 4.523949013659754e-06,
      "loss": 0.0006,
      "step": 231
    },
    {
      "epoch": 1.8311345646437993,
      "grad_norm": 0.2126503586769104,
      "learning_rate": 4.5168978334585955e-06,
      "loss": 0.0005,
      "step": 232
    },
    {
      "epoch": 1.8390501319261214,
      "grad_norm": 0.23717330396175385,
      "learning_rate": 4.509800396505298e-06,
      "loss": 0.0019,
      "step": 233
    },
    {
      "epoch": 1.8469656992084431,
      "grad_norm": 0.2814564108848572,
      "learning_rate": 4.502656865576762e-06,
      "loss": 0.0019,
      "step": 234
    },
    {
      "epoch": 1.8548812664907652,
      "grad_norm": 0.12869513034820557,
      "learning_rate": 4.495467404507039e-06,
      "loss": 0.0007,
      "step": 235
    },
    {
      "epoch": 1.862796833773087,
      "grad_norm": 0.45971599221229553,
      "learning_rate": 4.4882321781835666e-06,
      "loss": 0.0007,
      "step": 236
    },
    {
      "epoch": 1.870712401055409,
      "grad_norm": 0.15887171030044556,
      "learning_rate": 4.4809513525433925e-06,
      "loss": 0.0009,
      "step": 237
    },
    {
      "epoch": 1.8786279683377307,
      "grad_norm": 0.26040664315223694,
      "learning_rate": 4.473625094569366e-06,
      "loss": 0.0005,
      "step": 238
    },
    {
      "epoch": 1.8865435356200528,
      "grad_norm": 0.6354907155036926,
      "learning_rate": 4.466253572286308e-06,
      "loss": 0.0012,
      "step": 239
    },
    {
      "epoch": 1.8944591029023745,
      "grad_norm": 0.5247849225997925,
      "learning_rate": 4.458836954757161e-06,
      "loss": 0.0018,
      "step": 240
    },
    {
      "epoch": 1.9023746701846966,
      "grad_norm": 0.14983093738555908,
      "learning_rate": 4.4513754120791065e-06,
      "loss": 0.0004,
      "step": 241
    },
    {
      "epoch": 1.9102902374670183,
      "grad_norm": 0.1560702919960022,
      "learning_rate": 4.443869115379667e-06,
      "loss": 0.0011,
      "step": 242
    },
    {
      "epoch": 1.9182058047493404,
      "grad_norm": 0.5521268248558044,
      "learning_rate": 4.436318236812782e-06,
      "loss": 0.0002,
      "step": 243
    },
    {
      "epoch": 1.9261213720316621,
      "grad_norm": 0.34709402918815613,
      "learning_rate": 4.428722949554858e-06,
      "loss": 0.0009,
      "step": 244
    },
    {
      "epoch": 1.9340369393139842,
      "grad_norm": 0.4721614122390747,
      "learning_rate": 4.421083427800795e-06,
      "loss": 0.0014,
      "step": 245
    },
    {
      "epoch": 1.941952506596306,
      "grad_norm": 0.12338114529848099,
      "learning_rate": 4.413399846759998e-06,
      "loss": 0.0005,
      "step": 246
    },
    {
      "epoch": 1.949868073878628,
      "grad_norm": 0.2648129463195801,
      "learning_rate": 4.405672382652349e-06,
      "loss": 0.0035,
      "step": 247
    },
    {
      "epoch": 1.9577836411609497,
      "grad_norm": 0.41379258036613464,
      "learning_rate": 4.397901212704176e-06,
      "loss": 0.0001,
      "step": 248
    },
    {
      "epoch": 1.9656992084432718,
      "grad_norm": 0.04220602661371231,
      "learning_rate": 4.390086515144179e-06,
      "loss": 0.0002,
      "step": 249
    },
    {
      "epoch": 1.9736147757255935,
      "grad_norm": 0.04110397771000862,
      "learning_rate": 4.38222846919935e-06,
      "loss": 0.0001,
      "step": 250
    },
    {
      "epoch": 1.9815303430079156,
      "grad_norm": 0.027100196108222008,
      "learning_rate": 4.3743272550908545e-06,
      "loss": 0.0001,
      "step": 251
    },
    {
      "epoch": 1.9894459102902373,
      "grad_norm": 0.0841871052980423,
      "learning_rate": 4.366383054029907e-06,
      "loss": 0.0002,
      "step": 252
    }
  ],
  "logging_steps": 1,
  "max_steps": 756,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 6,
  "save_steps": 126,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 6.348772231190938e+17,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}