{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1661,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0006020469596628537,
      "grad_norm": 40.966713680527505,
      "learning_rate": 5.98802395209581e-08,
      "loss": 2.0373,
      "step": 1
    },
    {
      "epoch": 0.0030102347983142685,
      "grad_norm": 39.14616421730173,
      "learning_rate": 2.9940119760479047e-07,
      "loss": 2.0379,
      "step": 5
    },
    {
      "epoch": 0.006020469596628537,
      "grad_norm": 27.910266092453746,
      "learning_rate": 5.988023952095809e-07,
      "loss": 1.9953,
      "step": 10
    },
    {
      "epoch": 0.009030704394942806,
      "grad_norm": 15.201013735116844,
      "learning_rate": 8.982035928143713e-07,
      "loss": 1.7804,
      "step": 15
    },
    {
      "epoch": 0.012040939193257074,
      "grad_norm": 5.295520564674036,
      "learning_rate": 1.1976047904191619e-06,
      "loss": 1.502,
      "step": 20
    },
    {
      "epoch": 0.015051173991571343,
      "grad_norm": 4.314026946089969,
      "learning_rate": 1.4970059880239521e-06,
      "loss": 1.3422,
      "step": 25
    },
    {
      "epoch": 0.018061408789885613,
      "grad_norm": 2.8393277241114694,
      "learning_rate": 1.7964071856287426e-06,
      "loss": 1.2716,
      "step": 30
    },
    {
      "epoch": 0.02107164358819988,
      "grad_norm": 1.9881246281959908,
      "learning_rate": 2.095808383233533e-06,
      "loss": 1.142,
      "step": 35
    },
    {
      "epoch": 0.024081878386514148,
      "grad_norm": 1.4576393482889902,
      "learning_rate": 2.3952095808383237e-06,
      "loss": 1.091,
      "step": 40
    },
    {
      "epoch": 0.027092113184828417,
      "grad_norm": 1.3452736306984228,
      "learning_rate": 2.694610778443114e-06,
      "loss": 1.0808,
      "step": 45
    },
    {
      "epoch": 0.030102347983142687,
      "grad_norm": 1.2853833849515133,
      "learning_rate": 2.9940119760479042e-06,
      "loss": 1.0534,
      "step": 50
    },
    {
      "epoch": 0.033112582781456956,
      "grad_norm": 1.3461267648995063,
      "learning_rate": 3.2934131736526947e-06,
      "loss": 1.0335,
      "step": 55
    },
    {
      "epoch": 0.036122817579771226,
      "grad_norm": 1.2720929793887874,
      "learning_rate": 3.592814371257485e-06,
      "loss": 1.0202,
      "step": 60
    },
    {
      "epoch": 0.03913305237808549,
      "grad_norm": 1.2405264880245372,
      "learning_rate": 3.892215568862276e-06,
      "loss": 1.0059,
      "step": 65
    },
    {
      "epoch": 0.04214328717639976,
      "grad_norm": 1.1810599448574401,
      "learning_rate": 4.191616766467066e-06,
      "loss": 1.0041,
      "step": 70
    },
    {
      "epoch": 0.04515352197471403,
      "grad_norm": 1.2433212471597048,
      "learning_rate": 4.4910179640718566e-06,
      "loss": 0.9915,
      "step": 75
    },
    {
      "epoch": 0.048163756773028296,
      "grad_norm": 1.2370160588056265,
      "learning_rate": 4.7904191616766475e-06,
      "loss": 0.9683,
      "step": 80
    },
    {
      "epoch": 0.051173991571342566,
      "grad_norm": 1.2799056245464302,
      "learning_rate": 5.0898203592814375e-06,
      "loss": 0.9757,
      "step": 85
    },
    {
      "epoch": 0.054184226369656835,
      "grad_norm": 1.2232343401620152,
      "learning_rate": 5.389221556886228e-06,
      "loss": 0.9664,
      "step": 90
    },
    {
      "epoch": 0.057194461167971104,
      "grad_norm": 1.2749429574013502,
      "learning_rate": 5.6886227544910184e-06,
      "loss": 0.9531,
      "step": 95
    },
    {
      "epoch": 0.060204695966285374,
      "grad_norm": 1.341905992560118,
      "learning_rate": 5.9880239520958085e-06,
      "loss": 0.936,
      "step": 100
    },
    {
      "epoch": 0.06321493076459964,
      "grad_norm": 1.3312129649486055,
      "learning_rate": 6.2874251497005985e-06,
      "loss": 0.9288,
      "step": 105
    },
    {
      "epoch": 0.06622516556291391,
      "grad_norm": 1.3057249774040953,
      "learning_rate": 6.586826347305389e-06,
      "loss": 0.9137,
      "step": 110
    },
    {
      "epoch": 0.06923540036122817,
      "grad_norm": 1.3480198308811908,
      "learning_rate": 6.88622754491018e-06,
      "loss": 0.9087,
      "step": 115
    },
    {
      "epoch": 0.07224563515954245,
      "grad_norm": 1.2851052893092942,
      "learning_rate": 7.18562874251497e-06,
      "loss": 0.9006,
      "step": 120
    },
    {
      "epoch": 0.07525586995785671,
      "grad_norm": 1.3337166654009118,
      "learning_rate": 7.485029940119761e-06,
      "loss": 0.8995,
      "step": 125
    },
    {
      "epoch": 0.07826610475617098,
      "grad_norm": 1.259701546947682,
      "learning_rate": 7.784431137724551e-06,
      "loss": 0.877,
      "step": 130
    },
    {
      "epoch": 0.08127633955448525,
      "grad_norm": 1.3284638098388712,
      "learning_rate": 8.083832335329342e-06,
      "loss": 0.8847,
      "step": 135
    },
    {
      "epoch": 0.08428657435279951,
      "grad_norm": 1.3016966435741442,
      "learning_rate": 8.383233532934131e-06,
      "loss": 0.8562,
      "step": 140
    },
    {
      "epoch": 0.08729680915111379,
      "grad_norm": 1.272771944986212,
      "learning_rate": 8.682634730538922e-06,
      "loss": 0.8537,
      "step": 145
    },
    {
      "epoch": 0.09030704394942805,
      "grad_norm": 1.2404717495109163,
      "learning_rate": 8.982035928143713e-06,
      "loss": 0.8402,
      "step": 150
    },
    {
      "epoch": 0.09331727874774233,
      "grad_norm": 1.3171631151874839,
      "learning_rate": 9.281437125748504e-06,
      "loss": 0.8596,
      "step": 155
    },
    {
      "epoch": 0.09632751354605659,
      "grad_norm": 1.2739485119594887,
      "learning_rate": 9.580838323353295e-06,
      "loss": 0.8548,
      "step": 160
    },
    {
      "epoch": 0.09933774834437085,
      "grad_norm": 1.2415391381244691,
      "learning_rate": 9.880239520958084e-06,
      "loss": 0.8348,
      "step": 165
    },
    {
      "epoch": 0.10234798314268513,
      "grad_norm": 1.2732863554156713,
      "learning_rate": 9.999900509954779e-06,
      "loss": 0.8326,
      "step": 170
    },
    {
      "epoch": 0.1053582179409994,
      "grad_norm": 1.3293213562766069,
      "learning_rate": 9.999292529572152e-06,
      "loss": 0.839,
      "step": 175
    },
    {
      "epoch": 0.10836845273931367,
      "grad_norm": 1.3011820572360617,
      "learning_rate": 9.998131908181262e-06,
      "loss": 0.8377,
      "step": 180
    },
    {
      "epoch": 0.11137868753762793,
      "grad_norm": 1.2833557284441266,
      "learning_rate": 9.996418774081658e-06,
      "loss": 0.8228,
      "step": 185
    },
    {
      "epoch": 0.11438892233594221,
      "grad_norm": 1.286488000985355,
      "learning_rate": 9.994153316649769e-06,
      "loss": 0.8318,
      "step": 190
    },
    {
      "epoch": 0.11739915713425647,
      "grad_norm": 1.2384872269253941,
      "learning_rate": 9.991335786317964e-06,
      "loss": 0.8209,
      "step": 195
    },
    {
      "epoch": 0.12040939193257075,
      "grad_norm": 1.2713798885562295,
      "learning_rate": 9.987966494546873e-06,
      "loss": 0.8069,
      "step": 200
    },
    {
      "epoch": 0.12341962673088501,
      "grad_norm": 1.2482060631896188,
      "learning_rate": 9.984045813790959e-06,
      "loss": 0.8142,
      "step": 205
    },
    {
      "epoch": 0.12642986152919927,
      "grad_norm": 1.3415986154597055,
      "learning_rate": 9.979574177457337e-06,
      "loss": 0.813,
      "step": 210
    },
    {
      "epoch": 0.12944009632751355,
      "grad_norm": 1.2350908672442804,
      "learning_rate": 9.974552079857873e-06,
      "loss": 0.8109,
      "step": 215
    },
    {
      "epoch": 0.13245033112582782,
      "grad_norm": 1.3964490955664748,
      "learning_rate": 9.968980076154533e-06,
      "loss": 0.8293,
      "step": 220
    },
    {
      "epoch": 0.13546056592414207,
      "grad_norm": 1.2591771311022242,
      "learning_rate": 9.962858782298023e-06,
      "loss": 0.802,
      "step": 225
    },
    {
      "epoch": 0.13847080072245635,
      "grad_norm": 1.288853392082994,
      "learning_rate": 9.956188874959686e-06,
      "loss": 0.8151,
      "step": 230
    },
    {
      "epoch": 0.14148103552077063,
      "grad_norm": 1.197725597237524,
      "learning_rate": 9.948971091456715e-06,
      "loss": 0.8071,
      "step": 235
    },
    {
      "epoch": 0.1444912703190849,
      "grad_norm": 1.2226857692711433,
      "learning_rate": 9.941206229670634e-06,
      "loss": 0.8053,
      "step": 240
    },
    {
      "epoch": 0.14750150511739915,
      "grad_norm": 1.3119146876238303,
      "learning_rate": 9.932895147959106e-06,
      "loss": 0.7955,
      "step": 245
    },
    {
      "epoch": 0.15051173991571343,
      "grad_norm": 1.3326937913135584,
      "learning_rate": 9.924038765061042e-06,
      "loss": 0.7833,
      "step": 250
    },
    {
      "epoch": 0.1535219747140277,
      "grad_norm": 1.2647168837729537,
      "learning_rate": 9.91463805999504e-06,
      "loss": 0.7933,
      "step": 255
    },
    {
      "epoch": 0.15653220951234195,
      "grad_norm": 1.2033595276379918,
      "learning_rate": 9.904694071951167e-06,
      "loss": 0.8004,
      "step": 260
    },
    {
      "epoch": 0.15954244431065623,
      "grad_norm": 1.1708378372158956,
      "learning_rate": 9.894207900176074e-06,
      "loss": 0.7877,
      "step": 265
    },
    {
      "epoch": 0.1625526791089705,
      "grad_norm": 1.277798553181957,
      "learning_rate": 9.883180703851488e-06,
      "loss": 0.7899,
      "step": 270
    },
    {
      "epoch": 0.16556291390728478,
      "grad_norm": 1.207840548329369,
      "learning_rate": 9.871613701966067e-06,
      "loss": 0.7885,
      "step": 275
    },
    {
      "epoch": 0.16857314870559903,
      "grad_norm": 1.156274795904894,
      "learning_rate": 9.859508173180653e-06,
      "loss": 0.7664,
      "step": 280
    },
    {
      "epoch": 0.1715833835039133,
      "grad_norm": 1.164171992678941,
      "learning_rate": 9.846865455686915e-06,
      "loss": 0.7777,
      "step": 285
    },
    {
      "epoch": 0.17459361830222758,
      "grad_norm": 1.1762216211439502,
      "learning_rate": 9.833686947059436e-06,
      "loss": 0.7727,
      "step": 290
    },
    {
      "epoch": 0.17760385310054183,
      "grad_norm": 1.1617067559555396,
      "learning_rate": 9.819974104101198e-06,
      "loss": 0.7724,
      "step": 295
    },
    {
      "epoch": 0.1806140878988561,
      "grad_norm": 1.1855967777933063,
      "learning_rate": 9.80572844268256e-06,
      "loss": 0.7645,
      "step": 300
    },
    {
      "epoch": 0.18362432269717038,
      "grad_norm": 1.1944671571121344,
      "learning_rate": 9.790951537573686e-06,
      "loss": 0.7819,
      "step": 305
    },
    {
      "epoch": 0.18663455749548466,
      "grad_norm": 1.1965936475386203,
      "learning_rate": 9.775645022270448e-06,
      "loss": 0.778,
      "step": 310
    },
    {
      "epoch": 0.1896447922937989,
      "grad_norm": 1.256280786074695,
      "learning_rate": 9.759810588813872e-06,
      "loss": 0.774,
      "step": 315
    },
    {
      "epoch": 0.19265502709211318,
      "grad_norm": 1.3027821404071966,
      "learning_rate": 9.743449987603082e-06,
      "loss": 0.7682,
      "step": 320
    },
    {
      "epoch": 0.19566526189042746,
      "grad_norm": 1.2406739802662574,
      "learning_rate": 9.726565027201813e-06,
      "loss": 0.788,
      "step": 325
    },
    {
      "epoch": 0.1986754966887417,
      "grad_norm": 1.1751549969003265,
      "learning_rate": 9.70915757413847e-06,
      "loss": 0.7815,
      "step": 330
    },
    {
      "epoch": 0.20168573148705599,
      "grad_norm": 1.1964609554923433,
      "learning_rate": 9.691229552699817e-06,
      "loss": 0.7615,
      "step": 335
    },
    {
      "epoch": 0.20469596628537026,
      "grad_norm": 1.2774212868740151,
      "learning_rate": 9.672782944718234e-06,
      "loss": 0.7677,
      "step": 340
    },
    {
      "epoch": 0.20770620108368454,
      "grad_norm": 1.202397374963106,
      "learning_rate": 9.65381978935266e-06,
      "loss": 0.7671,
      "step": 345
    },
    {
      "epoch": 0.2107164358819988,
      "grad_norm": 1.2159818292840359,
      "learning_rate": 9.634342182863163e-06,
      "loss": 0.7476,
      "step": 350
    },
    {
      "epoch": 0.21372667068031306,
      "grad_norm": 1.194513717936444,
      "learning_rate": 9.614352278379217e-06,
      "loss": 0.7788,
      "step": 355
    },
    {
      "epoch": 0.21673690547862734,
      "grad_norm": 1.2060005508903568,
      "learning_rate": 9.593852285661684e-06,
      "loss": 0.7709,
      "step": 360
    },
    {
      "epoch": 0.2197471402769416,
      "grad_norm": 1.203716084337312,
      "learning_rate": 9.572844470858537e-06,
      "loss": 0.7584,
      "step": 365
    },
    {
      "epoch": 0.22275737507525586,
      "grad_norm": 1.282647241669985,
      "learning_rate": 9.551331156254358e-06,
      "loss": 0.7696,
      "step": 370
    },
    {
      "epoch": 0.22576760987357014,
      "grad_norm": 1.2009722082380498,
      "learning_rate": 9.529314720013618e-06,
      "loss": 0.7526,
      "step": 375
    },
    {
      "epoch": 0.22877784467188442,
      "grad_norm": 1.2201198977296086,
      "learning_rate": 9.506797595917787e-06,
      "loss": 0.7511,
      "step": 380
    },
    {
      "epoch": 0.23178807947019867,
      "grad_norm": 1.3415385517504728,
      "learning_rate": 9.483782273096295e-06,
      "loss": 0.7513,
      "step": 385
    },
    {
      "epoch": 0.23479831426851294,
      "grad_norm": 1.26924665240572,
      "learning_rate": 9.460271295751373e-06,
      "loss": 0.7649,
      "step": 390
    },
    {
      "epoch": 0.23780854906682722,
      "grad_norm": 1.2785361560349413,
      "learning_rate": 9.436267262876808e-06,
      "loss": 0.751,
      "step": 395
    },
    {
      "epoch": 0.2408187838651415,
      "grad_norm": 1.1852007444354873,
      "learning_rate": 9.411772827970642e-06,
      "loss": 0.7548,
      "step": 400
    },
    {
      "epoch": 0.24382901866345574,
      "grad_norm": 1.1910921737694808,
      "learning_rate": 9.38679069874184e-06,
      "loss": 0.7507,
      "step": 405
    },
    {
      "epoch": 0.24683925346177002,
      "grad_norm": 1.3053941058405514,
      "learning_rate": 9.36132363681097e-06,
      "loss": 0.7557,
      "step": 410
    },
    {
      "epoch": 0.2498494882600843,
      "grad_norm": 1.2465327608331673,
      "learning_rate": 9.335374457404928e-06,
      "loss": 0.7649,
      "step": 415
    },
    {
      "epoch": 0.25285972305839854,
      "grad_norm": 1.2813369476453156,
      "learning_rate": 9.308946029045726e-06,
      "loss": 0.7325,
      "step": 420
    },
    {
      "epoch": 0.25586995785671285,
      "grad_norm": 1.2539019797830948,
      "learning_rate": 9.282041273233402e-06,
      "loss": 0.726,
      "step": 425
    },
    {
      "epoch": 0.2588801926550271,
      "grad_norm": 1.2685847502715055,
      "learning_rate": 9.254663164123052e-06,
      "loss": 0.7372,
      "step": 430
    },
    {
      "epoch": 0.26189042745334135,
      "grad_norm": 1.2404250071973326,
      "learning_rate": 9.226814728196072e-06,
      "loss": 0.7333,
      "step": 435
    },
    {
      "epoch": 0.26490066225165565,
      "grad_norm": 1.206906014688777,
      "learning_rate": 9.198499043925591e-06,
      "loss": 0.7305,
      "step": 440
    },
    {
      "epoch": 0.2679108970499699,
      "grad_norm": 1.2568238232780307,
      "learning_rate": 9.169719241436162e-06,
      "loss": 0.7437,
      "step": 445
    },
    {
      "epoch": 0.27092113184828415,
      "grad_norm": 1.201047028691787,
      "learning_rate": 9.14047850215775e-06,
      "loss": 0.7229,
      "step": 450
    },
    {
      "epoch": 0.27393136664659845,
      "grad_norm": 1.1628890533112184,
      "learning_rate": 9.110780058474052e-06,
      "loss": 0.7252,
      "step": 455
    },
    {
      "epoch": 0.2769416014449127,
      "grad_norm": 1.222552632587458,
      "learning_rate": 9.080627193365155e-06,
      "loss": 0.7348,
      "step": 460
    },
    {
      "epoch": 0.27995183624322695,
      "grad_norm": 1.204712682838897,
      "learning_rate": 9.050023240044649e-06,
      "loss": 0.7394,
      "step": 465
    },
    {
      "epoch": 0.28296207104154125,
      "grad_norm": 1.1699974198146679,
      "learning_rate": 9.018971581591141e-06,
      "loss": 0.7172,
      "step": 470
    },
    {
      "epoch": 0.2859723058398555,
      "grad_norm": 1.2143354714842383,
      "learning_rate": 8.987475650574289e-06,
      "loss": 0.7459,
      "step": 475
    },
    {
      "epoch": 0.2889825406381698,
      "grad_norm": 1.2344181327548198,
      "learning_rate": 8.955538928675343e-06,
      "loss": 0.7217,
      "step": 480
    },
    {
      "epoch": 0.29199277543648405,
      "grad_norm": 1.2200821617384021,
      "learning_rate": 8.923164946302274e-06,
      "loss": 0.7355,
      "step": 485
    },
    {
      "epoch": 0.2950030102347983,
      "grad_norm": 1.2730755552957604,
      "learning_rate": 8.890357282199504e-06,
      "loss": 0.7407,
      "step": 490
    },
    {
      "epoch": 0.2980132450331126,
      "grad_norm": 1.326965523163219,
      "learning_rate": 8.857119563052301e-06,
      "loss": 0.7112,
      "step": 495
    },
    {
      "epoch": 0.30102347983142685,
      "grad_norm": 1.2881202459959005,
      "learning_rate": 8.823455463085873e-06,
      "loss": 0.7299,
      "step": 500
    },
    {
      "epoch": 0.3040337146297411,
      "grad_norm": 1.3023027317084608,
      "learning_rate": 8.789368703659199e-06,
      "loss": 0.7291,
      "step": 505
    },
    {
      "epoch": 0.3070439494280554,
      "grad_norm": 1.192827763147033,
      "learning_rate": 8.754863052853658e-06,
      "loss": 0.7242,
      "step": 510
    },
    {
      "epoch": 0.31005418422636966,
      "grad_norm": 1.172549917556519,
      "learning_rate": 8.719942325056496e-06,
      "loss": 0.707,
      "step": 515
    },
    {
      "epoch": 0.3130644190246839,
      "grad_norm": 1.1750104521638358,
      "learning_rate": 8.68461038053916e-06,
      "loss": 0.7152,
      "step": 520
    },
    {
      "epoch": 0.3160746538229982,
      "grad_norm": 1.3380189595946483,
      "learning_rate": 8.648871125030576e-06,
      "loss": 0.711,
      "step": 525
    },
    {
      "epoch": 0.31908488862131246,
      "grad_norm": 1.226348488844478,
      "learning_rate": 8.612728509285395e-06,
      "loss": 0.7104,
      "step": 530
    },
    {
      "epoch": 0.3220951234196267,
      "grad_norm": 1.245273272201369,
      "learning_rate": 8.576186528647253e-06,
      "loss": 0.7286,
      "step": 535
    },
    {
      "epoch": 0.325105358217941,
      "grad_norm": 1.1790288496340813,
      "learning_rate": 8.53924922260712e-06,
      "loss": 0.7075,
      "step": 540
    },
    {
      "epoch": 0.32811559301625526,
      "grad_norm": 1.1914412291760281,
      "learning_rate": 8.501920674356755e-06,
      "loss": 0.7008,
      "step": 545
    },
    {
      "epoch": 0.33112582781456956,
      "grad_norm": 1.1806518773814458,
      "learning_rate": 8.46420501033733e-06,
      "loss": 0.6934,
      "step": 550
    },
    {
      "epoch": 0.3341360626128838,
      "grad_norm": 1.2723595572649768,
      "learning_rate": 8.42610639978329e-06,
      "loss": 0.7019,
      "step": 555
    },
    {
      "epoch": 0.33714629741119806,
      "grad_norm": 1.2225157243802531,
      "learning_rate": 8.387629054261454e-06,
      "loss": 0.7002,
      "step": 560
    },
    {
      "epoch": 0.34015653220951236,
      "grad_norm": 1.2003267518231666,
      "learning_rate": 8.348777227205462e-06,
      "loss": 0.6984,
      "step": 565
    },
    {
      "epoch": 0.3431667670078266,
      "grad_norm": 1.1755635510106648,
      "learning_rate": 8.309555213445583e-06,
      "loss": 0.7123,
      "step": 570
    },
    {
      "epoch": 0.34617700180614086,
      "grad_norm": 1.2065493450135722,
      "learning_rate": 8.269967348733947e-06,
      "loss": 0.6855,
      "step": 575
    },
    {
      "epoch": 0.34918723660445516,
      "grad_norm": 1.2455393618923039,
      "learning_rate": 8.230018009265255e-06,
      "loss": 0.6971,
      "step": 580
    },
    {
      "epoch": 0.3521974714027694,
      "grad_norm": 1.3331603991829393,
      "learning_rate": 8.189711611193012e-06,
      "loss": 0.6985,
      "step": 585
    },
    {
      "epoch": 0.35520770620108366,
      "grad_norm": 1.221254992942756,
      "learning_rate": 8.149052610141357e-06,
      "loss": 0.6843,
      "step": 590
    },
    {
      "epoch": 0.35821794099939797,
      "grad_norm": 1.2577154855191386,
      "learning_rate": 8.108045500712518e-06,
      "loss": 0.7015,
      "step": 595
    },
    {
      "epoch": 0.3612281757977122,
      "grad_norm": 1.2678823261096912,
      "learning_rate": 8.066694815989961e-06,
      "loss": 0.7054,
      "step": 600
    },
    {
      "epoch": 0.36423841059602646,
      "grad_norm": 1.3068755574009954,
      "learning_rate": 8.025005127037282e-06,
      "loss": 0.6909,
      "step": 605
    },
    {
      "epoch": 0.36724864539434077,
      "grad_norm": 1.25345913435044,
      "learning_rate": 7.982981042392907e-06,
      "loss": 0.6804,
      "step": 610
    },
    {
      "epoch": 0.370258880192655,
      "grad_norm": 1.2171026231986553,
      "learning_rate": 7.940627207560655e-06,
      "loss": 0.6792,
      "step": 615
    },
    {
      "epoch": 0.3732691149909693,
      "grad_norm": 1.1878268346505996,
      "learning_rate": 7.897948304496189e-06,
      "loss": 0.7002,
      "step": 620
    },
    {
      "epoch": 0.37627934978928357,
      "grad_norm": 1.2021282337780814,
      "learning_rate": 7.854949051089467e-06,
      "loss": 0.7026,
      "step": 625
    },
    {
      "epoch": 0.3792895845875978,
      "grad_norm": 1.219850402255314,
      "learning_rate": 7.811634200643202e-06,
      "loss": 0.7081,
      "step": 630
    },
    {
      "epoch": 0.3822998193859121,
      "grad_norm": 1.25066752923423,
      "learning_rate": 7.768008541347423e-06,
      "loss": 0.6709,
      "step": 635
    },
    {
      "epoch": 0.38531005418422637,
      "grad_norm": 1.233821739163781,
      "learning_rate": 7.72407689575016e-06,
      "loss": 0.6821,
      "step": 640
    },
    {
      "epoch": 0.3883202889825406,
      "grad_norm": 1.2400488427811083,
      "learning_rate": 7.67984412022434e-06,
      "loss": 0.6797,
      "step": 645
    },
    {
      "epoch": 0.3913305237808549,
      "grad_norm": 1.2744104516354757,
      "learning_rate": 7.635315104430959e-06,
      "loss": 0.6785,
      "step": 650
    },
    {
      "epoch": 0.39434075857916917,
      "grad_norm": 1.264910637088745,
      "learning_rate": 7.5904947707785434e-06,
      "loss": 0.649,
      "step": 655
    },
    {
      "epoch": 0.3973509933774834,
      "grad_norm": 1.2672765451192602,
      "learning_rate": 7.545388073879018e-06,
      "loss": 0.6906,
      "step": 660
    },
    {
      "epoch": 0.4003612281757977,
      "grad_norm": 1.2685599345911318,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.6671,
      "step": 665
    },
    {
      "epoch": 0.40337146297411197,
      "grad_norm": 1.2358020853032294,
      "learning_rate": 7.454335566513603e-06,
      "loss": 0.6775,
      "step": 670
    },
    {
      "epoch": 0.4063816977724263,
      "grad_norm": 1.316177091586594,
      "learning_rate": 7.408399821341787e-06,
      "loss": 0.679,
      "step": 675
    },
    {
      "epoch": 0.4093919325707405,
      "grad_norm": 1.1839753807419882,
      "learning_rate": 7.362197842398355e-06,
      "loss": 0.6712,
      "step": 680
    },
    {
      "epoch": 0.4124021673690548,
      "grad_norm": 1.2764557732073194,
      "learning_rate": 7.315734737027612e-06,
      "loss": 0.6719,
      "step": 685
    },
    {
      "epoch": 0.4154124021673691,
      "grad_norm": 1.2259459384980307,
      "learning_rate": 7.2690156414397775e-06,
      "loss": 0.657,
      "step": 690
    },
    {
      "epoch": 0.4184226369656833,
      "grad_norm": 1.2344897988837857,
      "learning_rate": 7.22204572014322e-06,
      "loss": 0.6553,
      "step": 695
    },
    {
      "epoch": 0.4214328717639976,
      "grad_norm": 1.2553454967603142,
      "learning_rate": 7.174830165373542e-06,
      "loss": 0.6602,
      "step": 700
    },
    {
      "epoch": 0.4244431065623119,
      "grad_norm": 1.2370627727074957,
      "learning_rate": 7.127374196519616e-06,
      "loss": 0.6566,
      "step": 705
    },
    {
      "epoch": 0.4274533413606261,
      "grad_norm": 1.23827568170562,
      "learning_rate": 7.079683059546607e-06,
      "loss": 0.6566,
      "step": 710
    },
    {
      "epoch": 0.4304635761589404,
      "grad_norm": 1.2717726441044983,
      "learning_rate": 7.031762026416074e-06,
      "loss": 0.6523,
      "step": 715
    },
    {
      "epoch": 0.4334738109572547,
      "grad_norm": 1.2414138030074853,
      "learning_rate": 6.983616394503177e-06,
      "loss": 0.668,
      "step": 720
    },
    {
      "epoch": 0.43648404575556893,
      "grad_norm": 1.2863979258950011,
      "learning_rate": 6.9352514860110876e-06,
      "loss": 0.6723,
      "step": 725
    },
    {
      "epoch": 0.4394942805538832,
      "grad_norm": 1.1998975466871244,
      "learning_rate": 6.886672647382653e-06,
      "loss": 0.6496,
      "step": 730
    },
    {
      "epoch": 0.4425045153521975,
      "grad_norm": 1.2253729409328844,
      "learning_rate": 6.837885248709386e-06,
      "loss": 0.6837,
      "step": 735
    },
    {
      "epoch": 0.44551475015051173,
      "grad_norm": 1.3977058259662651,
      "learning_rate": 6.788894683137822e-06,
      "loss": 0.6535,
      "step": 740
    },
    {
      "epoch": 0.44852498494882603,
      "grad_norm": 1.2252881439154193,
      "learning_rate": 6.739706366273346e-06,
      "loss": 0.6668,
      "step": 745
    },
    {
      "epoch": 0.4515352197471403,
      "grad_norm": 1.2685171799824646,
      "learning_rate": 6.690325735581532e-06,
      "loss": 0.6408,
      "step": 750
    },
    {
      "epoch": 0.45454545454545453,
      "grad_norm": 1.2559367914781316,
      "learning_rate": 6.640758249787067e-06,
      "loss": 0.6551,
      "step": 755
    },
    {
      "epoch": 0.45755568934376883,
      "grad_norm": 1.3414553027680045,
      "learning_rate": 6.591009388270315e-06,
      "loss": 0.6483,
      "step": 760
    },
    {
      "epoch": 0.4605659241420831,
      "grad_norm": 1.2833745957368083,
      "learning_rate": 6.54108465046161e-06,
      "loss": 0.6591,
      "step": 765
    },
    {
      "epoch": 0.46357615894039733,
      "grad_norm": 1.2801896169833944,
      "learning_rate": 6.490989555233328e-06,
      "loss": 0.6481,
      "step": 770
    },
    {
      "epoch": 0.46658639373871164,
      "grad_norm": 1.2982217970316745,
      "learning_rate": 6.440729640289809e-06,
      "loss": 0.6445,
      "step": 775
    },
    {
      "epoch": 0.4695966285370259,
      "grad_norm": 1.2399362763796928,
      "learning_rate": 6.3903104615551956e-06,
      "loss": 0.6428,
      "step": 780
    },
    {
      "epoch": 0.47260686333534013,
      "grad_norm": 1.252116968840507,
      "learning_rate": 6.3397375925592675e-06,
      "loss": 0.621,
      "step": 785
    },
    {
      "epoch": 0.47561709813365444,
      "grad_norm": 1.1634766201317972,
      "learning_rate": 6.289016623821308e-06,
      "loss": 0.6396,
      "step": 790
    },
    {
      "epoch": 0.4786273329319687,
      "grad_norm": 1.2089828599308985,
      "learning_rate": 6.2381531622321234e-06,
      "loss": 0.6429,
      "step": 795
    },
    {
      "epoch": 0.481637567730283,
      "grad_norm": 1.2611778575722072,
      "learning_rate": 6.18715283043422e-06,
      "loss": 0.6282,
      "step": 800
    },
    {
      "epoch": 0.48464780252859724,
      "grad_norm": 1.2634822793060227,
      "learning_rate": 6.136021266200271e-06,
      "loss": 0.6472,
      "step": 805
    },
    {
      "epoch": 0.4876580373269115,
      "grad_norm": 1.2731517375276795,
      "learning_rate": 6.084764121809878e-06,
      "loss": 0.6187,
      "step": 810
    },
    {
      "epoch": 0.4906682721252258,
      "grad_norm": 1.2681414747392186,
      "learning_rate": 6.033387063424765e-06,
      "loss": 0.6318,
      "step": 815
    },
    {
      "epoch": 0.49367850692354004,
      "grad_norm": 1.2152691698415932,
      "learning_rate": 5.9818957704624046e-06,
      "loss": 0.6442,
      "step": 820
    },
    {
      "epoch": 0.4966887417218543,
      "grad_norm": 1.2163481834250993,
      "learning_rate": 5.930295934968197e-06,
      "loss": 0.633,
      "step": 825
    },
    {
      "epoch": 0.4996989765201686,
      "grad_norm": 1.2644071741742902,
      "learning_rate": 5.878593260986256e-06,
      "loss": 0.6333,
      "step": 830
    },
    {
      "epoch": 0.5027092113184829,
      "grad_norm": 1.2998551781398153,
      "learning_rate": 5.8267934639288525e-06,
      "loss": 0.6377,
      "step": 835
    },
    {
      "epoch": 0.5057194461167971,
      "grad_norm": 1.2747330171688573,
      "learning_rate": 5.77490226994462e-06,
      "loss": 0.6385,
      "step": 840
    },
    {
      "epoch": 0.5087296809151114,
      "grad_norm": 1.2147166700112426,
      "learning_rate": 5.722925415285555e-06,
      "loss": 0.6335,
      "step": 845
    },
    {
      "epoch": 0.5117399157134257,
      "grad_norm": 1.2814771734916943,
      "learning_rate": 5.670868645672916e-06,
      "loss": 0.6316,
      "step": 850
    },
    {
      "epoch": 0.5147501505117399,
      "grad_norm": 1.2118662457570313,
      "learning_rate": 5.618737715662067e-06,
      "loss": 0.6219,
      "step": 855
    },
    {
      "epoch": 0.5177603853100542,
      "grad_norm": 1.2322127749127771,
      "learning_rate": 5.566538388006351e-06,
      "loss": 0.623,
      "step": 860
    },
    {
      "epoch": 0.5207706201083685,
      "grad_norm": 1.1991462675329747,
      "learning_rate": 5.514276433020044e-06,
      "loss": 0.6107,
      "step": 865
    },
    {
      "epoch": 0.5237808549066827,
      "grad_norm": 1.2149020805519655,
      "learning_rate": 5.461957627940489e-06,
      "loss": 0.6191,
      "step": 870
    },
    {
      "epoch": 0.526791089704997,
      "grad_norm": 1.1877405672596075,
      "learning_rate": 5.409587756289462e-06,
      "loss": 0.6112,
      "step": 875
    },
    {
      "epoch": 0.5298013245033113,
      "grad_norm": 1.1970060104579194,
      "learning_rate": 5.357172607233831e-06,
      "loss": 0.6284,
      "step": 880
    },
    {
      "epoch": 0.5328115593016255,
      "grad_norm": 1.368716689528383,
      "learning_rate": 5.304717974945596e-06,
      "loss": 0.6163,
      "step": 885
    },
    {
      "epoch": 0.5358217940999398,
      "grad_norm": 1.273293450405195,
      "learning_rate": 5.252229657961394e-06,
      "loss": 0.6214,
      "step": 890
    },
    {
      "epoch": 0.5388320288982541,
      "grad_norm": 1.196878746141804,
      "learning_rate": 5.199713458541495e-06,
      "loss": 0.6247,
      "step": 895
    },
    {
      "epoch": 0.5418422636965683,
      "grad_norm": 1.2084713164747305,
      "learning_rate": 5.1471751820284e-06,
      "loss": 0.6103,
      "step": 900
    },
    {
      "epoch": 0.5448524984948826,
      "grad_norm": 1.2196639084600536,
      "learning_rate": 5.094620636205096e-06,
      "loss": 0.6221,
      "step": 905
    },
    {
      "epoch": 0.5478627332931969,
      "grad_norm": 1.1988440703906922,
      "learning_rate": 5.042055630653042e-06,
      "loss": 0.6085,
      "step": 910
    },
    {
      "epoch": 0.5508729680915111,
      "grad_norm": 1.26407878369998,
      "learning_rate": 4.98948597610996e-06,
      "loss": 0.6114,
      "step": 915
    },
    {
      "epoch": 0.5538832028898254,
      "grad_norm": 1.2541252408565282,
      "learning_rate": 4.936917483827483e-06,
      "loss": 0.6098,
      "step": 920
    },
    {
      "epoch": 0.5568934376881397,
      "grad_norm": 1.2461032064160447,
      "learning_rate": 4.884355964928767e-06,
      "loss": 0.6288,
      "step": 925
    },
    {
      "epoch": 0.5599036724864539,
      "grad_norm": 1.1876409214621886,
      "learning_rate": 4.831807229766101e-06,
      "loss": 0.5967,
      "step": 930
    },
    {
      "epoch": 0.5629139072847682,
      "grad_norm": 1.3653959531629782,
      "learning_rate": 4.779277087278615e-06,
      "loss": 0.5981,
      "step": 935
    },
    {
      "epoch": 0.5659241420830825,
      "grad_norm": 1.259758029782513,
      "learning_rate": 4.7267713443501274e-06,
      "loss": 0.6208,
      "step": 940
    },
    {
      "epoch": 0.5689343768813967,
      "grad_norm": 1.2512441621206507,
      "learning_rate": 4.67429580516724e-06,
      "loss": 0.6036,
      "step": 945
    },
    {
      "epoch": 0.571944611679711,
      "grad_norm": 1.2148856832751866,
      "learning_rate": 4.6218562705777185e-06,
      "loss": 0.5918,
      "step": 950
    },
    {
      "epoch": 0.5749548464780253,
      "grad_norm": 1.2618875581171025,
      "learning_rate": 4.5694585374492314e-06,
      "loss": 0.5956,
      "step": 955
    },
    {
      "epoch": 0.5779650812763396,
      "grad_norm": 1.2405407377914555,
      "learning_rate": 4.517108398028566e-06,
      "loss": 0.6005,
      "step": 960
    },
    {
      "epoch": 0.5809753160746538,
      "grad_norm": 1.2775566642146432,
      "learning_rate": 4.464811639301314e-06,
      "loss": 0.589,
      "step": 965
    },
    {
      "epoch": 0.5839855508729681,
      "grad_norm": 1.3117899815699137,
      "learning_rate": 4.412574042352156e-06,
      "loss": 0.5979,
      "step": 970
    },
    {
      "epoch": 0.5869957856712824,
      "grad_norm": 1.269362160884011,
      "learning_rate": 4.360401381725806e-06,
      "loss": 0.5887,
      "step": 975
    },
    {
      "epoch": 0.5900060204695966,
      "grad_norm": 1.248523016364606,
      "learning_rate": 4.308299424788667e-06,
      "loss": 0.6022,
      "step": 980
    },
    {
      "epoch": 0.5930162552679109,
      "grad_norm": 1.2366195530748443,
      "learning_rate": 4.256273931091284e-06,
      "loss": 0.6,
      "step": 985
    },
    {
      "epoch": 0.5960264900662252,
      "grad_norm": 1.222474401781831,
      "learning_rate": 4.204330651731662e-06,
      "loss": 0.5871,
      "step": 990
    },
    {
      "epoch": 0.5990367248645394,
      "grad_norm": 1.2424032915235261,
      "learning_rate": 4.152475328719517e-06,
      "loss": 0.5783,
      "step": 995
    },
    {
      "epoch": 0.6020469596628537,
      "grad_norm": 1.2570025380680472,
      "learning_rate": 4.1007136943415325e-06,
      "loss": 0.5636,
      "step": 1000
    },
    {
      "epoch": 0.605057194461168,
      "grad_norm": 1.2170323945106316,
      "learning_rate": 4.049051470527692e-06,
      "loss": 0.601,
      "step": 1005
    },
    {
      "epoch": 0.6080674292594822,
      "grad_norm": 1.1958865931929052,
      "learning_rate": 3.997494368218745e-06,
      "loss": 0.5878,
      "step": 1010
    },
    {
      "epoch": 0.6110776640577965,
      "grad_norm": 1.3367291697694699,
      "learning_rate": 3.946048086734921e-06,
      "loss": 0.5924,
      "step": 1015
    },
    {
      "epoch": 0.6140878988561108,
      "grad_norm": 1.2820724684669875,
      "learning_rate": 3.894718313145873e-06,
      "loss": 0.5847,
      "step": 1020
    },
    {
      "epoch": 0.617098133654425,
      "grad_norm": 1.2719469697058394,
      "learning_rate": 3.843510721642036e-06,
      "loss": 0.5744,
      "step": 1025
    },
    {
      "epoch": 0.6201083684527393,
      "grad_norm": 1.2274652244842972,
      "learning_rate": 3.7924309729073616e-06,
      "loss": 0.5751,
      "step": 1030
    },
    {
      "epoch": 0.6231186032510536,
      "grad_norm": 1.3030807337065518,
      "learning_rate": 3.7414847134935716e-06,
      "loss": 0.5899,
      "step": 1035
    },
    {
      "epoch": 0.6261288380493678,
      "grad_norm": 1.1948808509249107,
      "learning_rate": 3.6906775751959667e-06,
      "loss": 0.5755,
      "step": 1040
    },
    {
      "epoch": 0.6291390728476821,
      "grad_norm": 1.2576164970306511,
      "learning_rate": 3.640015174430864e-06,
      "loss": 0.5682,
      "step": 1045
    },
    {
      "epoch": 0.6321493076459964,
      "grad_norm": 1.2585913656336474,
      "learning_rate": 3.5895031116147355e-06,
      "loss": 0.5791,
      "step": 1050
    },
    {
      "epoch": 0.6351595424443106,
      "grad_norm": 1.303783858936202,
      "learning_rate": 3.539146970545124e-06,
      "loss": 0.5789,
      "step": 1055
    },
    {
      "epoch": 0.6381697772426249,
      "grad_norm": 1.2996679794519685,
      "learning_rate": 3.488952317783374e-06,
      "loss": 0.5669,
      "step": 1060
    },
    {
      "epoch": 0.6411800120409392,
      "grad_norm": 1.227591078819204,
      "learning_rate": 3.438924702039301e-06,
      "loss": 0.5698,
      "step": 1065
    },
    {
      "epoch": 0.6441902468392534,
      "grad_norm": 1.2857752436654992,
      "learning_rate": 3.389069653557805e-06,
      "loss": 0.5739,
      "step": 1070
    },
    {
      "epoch": 0.6472004816375677,
      "grad_norm": 1.2421495144623809,
      "learning_rate": 3.3393926835075307e-06,
      "loss": 0.5788,
      "step": 1075
    },
    {
      "epoch": 0.650210716435882,
      "grad_norm": 1.3331817251764237,
      "learning_rate": 3.289899283371657e-06,
      "loss": 0.5606,
      "step": 1080
    },
    {
      "epoch": 0.6532209512341962,
      "grad_norm": 1.2708137446575152,
      "learning_rate": 3.240594924340835e-06,
      "loss": 0.5615,
      "step": 1085
    },
    {
      "epoch": 0.6562311860325105,
      "grad_norm": 1.2640775164953746,
      "learning_rate": 3.1914850567083866e-06,
      "loss": 0.565,
      "step": 1090
    },
    {
      "epoch": 0.6592414208308248,
      "grad_norm": 1.2780803430813366,
      "learning_rate": 3.1425751092678064e-06,
      "loss": 0.5508,
      "step": 1095
    },
    {
      "epoch": 0.6622516556291391,
      "grad_norm": 1.2424354197819152,
      "learning_rate": 3.0938704887126425e-06,
      "loss": 0.5541,
      "step": 1100
    },
    {
      "epoch": 0.6652618904274533,
      "grad_norm": 1.2085302041635138,
      "learning_rate": 3.045376579038821e-06,
      "loss": 0.5507,
      "step": 1105
    },
    {
      "epoch": 0.6682721252257676,
      "grad_norm": 1.2913791921928985,
      "learning_rate": 2.9970987409494784e-06,
      "loss": 0.5751,
      "step": 1110
    },
    {
      "epoch": 0.6712823600240819,
      "grad_norm": 1.2151986171427258,
      "learning_rate": 2.9490423112623646e-06,
      "loss": 0.5545,
      "step": 1115
    },
    {
      "epoch": 0.6742925948223961,
      "grad_norm": 1.285546926147462,
      "learning_rate": 2.9012126023198973e-06,
      "loss": 0.5559,
      "step": 1120
    },
    {
      "epoch": 0.6773028296207104,
      "grad_norm": 1.2516039295058423,
      "learning_rate": 2.853614901401909e-06,
      "loss": 0.5492,
      "step": 1125
    },
    {
      "epoch": 0.6803130644190247,
      "grad_norm": 1.2619394657656795,
      "learning_rate": 2.806254470141174e-06,
      "loss": 0.5535,
      "step": 1130
    },
    {
      "epoch": 0.6833232992173389,
      "grad_norm": 1.263160410510985,
      "learning_rate": 2.759136543941773e-06,
      "loss": 0.5491,
      "step": 1135
    },
    {
      "epoch": 0.6863335340156532,
      "grad_norm": 1.2808685326776417,
      "learning_rate": 2.712266331400332e-06,
      "loss": 0.5355,
      "step": 1140
    },
    {
      "epoch": 0.6893437688139675,
      "grad_norm": 1.2224213775798936,
      "learning_rate": 2.66564901373027e-06,
      "loss": 0.5497,
      "step": 1145
    },
    {
      "epoch": 0.6923540036122817,
      "grad_norm": 1.2417187357822892,
      "learning_rate": 2.6192897441890337e-06,
      "loss": 0.5395,
      "step": 1150
    },
    {
      "epoch": 0.695364238410596,
      "grad_norm": 1.2539889085403395,
      "learning_rate": 2.573193647508426e-06,
      "loss": 0.5484,
      "step": 1155
    },
    {
      "epoch": 0.6983744732089103,
      "grad_norm": 1.277361909713358,
      "learning_rate": 2.5273658193281252e-06,
      "loss": 0.5359,
      "step": 1160
    },
    {
      "epoch": 0.7013847080072245,
      "grad_norm": 1.2511254070143052,
      "learning_rate": 2.4818113256323745e-06,
      "loss": 0.5549,
      "step": 1165
    },
    {
      "epoch": 0.7043949428055388,
      "grad_norm": 1.3131375870134938,
      "learning_rate": 2.4365352021899635e-06,
      "loss": 0.5555,
      "step": 1170
    },
    {
      "epoch": 0.7074051776038531,
      "grad_norm": 1.2825609178954607,
      "learning_rate": 2.391542453997578e-06,
      "loss": 0.5452,
      "step": 1175
    },
    {
      "epoch": 0.7104154124021673,
      "grad_norm": 1.2877459534486142,
      "learning_rate": 2.346838054726505e-06,
      "loss": 0.5417,
      "step": 1180
    },
    {
      "epoch": 0.7134256472004816,
      "grad_norm": 1.2529996046706589,
      "learning_rate": 2.302426946172836e-06,
      "loss": 0.54,
      "step": 1185
    },
    {
      "epoch": 0.7164358819987959,
      "grad_norm": 1.2558399980367174,
      "learning_rate": 2.258314037711184e-06,
      "loss": 0.5484,
      "step": 1190
    },
    {
      "epoch": 0.7194461167971101,
      "grad_norm": 1.272832955666527,
      "learning_rate": 2.214504205751971e-06,
      "loss": 0.5515,
      "step": 1195
    },
    {
      "epoch": 0.7224563515954244,
      "grad_norm": 1.2469393251594179,
      "learning_rate": 2.1710022932023805e-06,
      "loss": 0.5519,
      "step": 1200
    },
    {
      "epoch": 0.7254665863937387,
      "grad_norm": 1.243045028807727,
      "learning_rate": 2.127813108931007e-06,
      "loss": 0.5438,
      "step": 1205
    },
    {
      "epoch": 0.7284768211920529,
      "grad_norm": 1.2114723202770783,
      "learning_rate": 2.084941427236245e-06,
      "loss": 0.5435,
      "step": 1210
    },
    {
      "epoch": 0.7314870559903672,
      "grad_norm": 1.2939587849010044,
      "learning_rate": 2.04239198731855e-06,
      "loss": 0.5393,
      "step": 1215
    },
    {
      "epoch": 0.7344972907886815,
      "grad_norm": 1.2378436974378162,
      "learning_rate": 2.000169492756523e-06,
      "loss": 0.5455,
      "step": 1220
    },
    {
      "epoch": 0.7375075255869958,
      "grad_norm": 1.2231608996738805,
      "learning_rate": 1.9582786109869713e-06,
      "loss": 0.5421,
      "step": 1225
    },
    {
      "epoch": 0.74051776038531,
      "grad_norm": 1.3275912626430932,
      "learning_rate": 1.9167239727889527e-06,
      "loss": 0.5361,
      "step": 1230
    },
    {
      "epoch": 0.7435279951836243,
      "grad_norm": 1.3041604998744816,
      "learning_rate": 1.875510171771865e-06,
      "loss": 0.5331,
      "step": 1235
    },
    {
      "epoch": 0.7465382299819386,
      "grad_norm": 1.279896389842292,
      "learning_rate": 1.8346417638676533e-06,
      "loss": 0.5286,
      "step": 1240
    },
    {
      "epoch": 0.7495484647802528,
      "grad_norm": 1.309403322072463,
      "learning_rate": 1.7941232668271863e-06,
      "loss": 0.5479,
      "step": 1245
    },
    {
      "epoch": 0.7525586995785671,
      "grad_norm": 1.2455386392310572,
      "learning_rate": 1.753959159720836e-06,
      "loss": 0.5353,
      "step": 1250
    },
    {
      "epoch": 0.7555689343768814,
      "grad_norm": 1.2637146932290357,
      "learning_rate": 1.7141538824433506e-06,
      "loss": 0.5349,
      "step": 1255
    },
    {
      "epoch": 0.7585791691751956,
      "grad_norm": 1.258655302158318,
      "learning_rate": 1.6747118352230495e-06,
      "loss": 0.538,
      "step": 1260
    },
    {
      "epoch": 0.7615894039735099,
      "grad_norm": 1.2756271345276968,
      "learning_rate": 1.6356373781354058e-06,
      "loss": 0.5167,
      "step": 1265
    },
    {
      "epoch": 0.7645996387718242,
      "grad_norm": 1.2413970049455438,
      "learning_rate": 1.5969348306210692e-06,
      "loss": 0.5309,
      "step": 1270
    },
    {
      "epoch": 0.7676098735701384,
      "grad_norm": 1.3035393545030223,
      "learning_rate": 1.5586084710083737e-06,
      "loss": 0.5239,
      "step": 1275
    },
    {
      "epoch": 0.7706201083684527,
      "grad_norm": 1.2294394327286127,
      "learning_rate": 1.5206625360403943e-06,
      "loss": 0.5305,
      "step": 1280
    },
    {
      "epoch": 0.773630343166767,
      "grad_norm": 1.2174444367598383,
      "learning_rate": 1.4831012204066114e-06,
      "loss": 0.531,
      "step": 1285
    },
    {
      "epoch": 0.7766405779650812,
      "grad_norm": 1.2366691168478938,
      "learning_rate": 1.445928676279199e-06,
      "loss": 0.518,
      "step": 1290
    },
    {
      "epoch": 0.7796508127633955,
      "grad_norm": 1.264298324101515,
      "learning_rate": 1.4091490128540374e-06,
      "loss": 0.5068,
      "step": 1295
    },
    {
      "epoch": 0.7826610475617098,
      "grad_norm": 1.2332308291363379,
      "learning_rate": 1.3727662958964627e-06,
      "loss": 0.5203,
      "step": 1300
    },
    {
      "epoch": 0.785671282360024,
      "grad_norm": 1.2299329633898903,
      "learning_rate": 1.3367845472918272e-06,
      "loss": 0.5298,
      "step": 1305
    },
    {
      "epoch": 0.7886815171583383,
      "grad_norm": 1.2504340191558994,
      "learning_rate": 1.3012077446008969e-06,
      "loss": 0.5248,
      "step": 1310
    },
    {
      "epoch": 0.7916917519566526,
      "grad_norm": 1.3049347135811968,
      "learning_rate": 1.266039820620159e-06,
      "loss": 0.523,
      "step": 1315
    },
    {
      "epoch": 0.7947019867549668,
      "grad_norm": 1.2179046011678003,
      "learning_rate": 1.2312846629470826e-06,
      "loss": 0.4974,
      "step": 1320
    },
    {
      "epoch": 0.7977122215532811,
      "grad_norm": 1.3164892931071699,
      "learning_rate": 1.1969461135503573e-06,
      "loss": 0.526,
      "step": 1325
    },
    {
      "epoch": 0.8007224563515954,
      "grad_norm": 1.271375375821384,
      "learning_rate": 1.163027968345195e-06,
      "loss": 0.5161,
      "step": 1330
    },
    {
      "epoch": 0.8037326911499096,
      "grad_norm": 1.3139938511991043,
      "learning_rate": 1.1295339767737125e-06,
      "loss": 0.5113,
      "step": 1335
    },
    {
      "epoch": 0.8067429259482239,
      "grad_norm": 1.286007032144441,
      "learning_rate": 1.0964678413904529e-06,
      "loss": 0.5217,
      "step": 1340
    },
    {
      "epoch": 0.8097531607465382,
      "grad_norm": 1.3030418958577936,
      "learning_rate": 1.0638332174530953e-06,
      "loss": 0.5326,
      "step": 1345
    },
    {
      "epoch": 0.8127633955448526,
      "grad_norm": 1.2573877263232616,
      "learning_rate": 1.0316337125183817e-06,
      "loss": 0.5172,
      "step": 1350
    },
    {
      "epoch": 0.8157736303431667,
      "grad_norm": 1.3008639038029295,
      "learning_rate": 9.998728860433277e-07,
      "loss": 0.512,
      "step": 1355
    },
    {
      "epoch": 0.818783865141481,
      "grad_norm": 1.2333298206728438,
      "learning_rate": 9.685542489917494e-07,
      "loss": 0.5047,
      "step": 1360
    },
    {
      "epoch": 0.8217940999397954,
      "grad_norm": 1.289522737500026,
      "learning_rate": 9.376812634461418e-07,
      "loss": 0.5104,
      "step": 1365
    },
    {
      "epoch": 0.8248043347381095,
      "grad_norm": 1.2659811011921933,
      "learning_rate": 9.072573422249692e-07,
      "loss": 0.5232,
      "step": 1370
    },
    {
      "epoch": 0.8278145695364238,
      "grad_norm": 1.22881385464476,
      "learning_rate": 8.772858485054042e-07,
      "loss": 0.5199,
      "step": 1375
    },
    {
      "epoch": 0.8308248043347382,
      "grad_norm": 1.2715826823700018,
      "learning_rate": 8.477700954515372e-07,
      "loss": 0.5202,
      "step": 1380
    },
    {
      "epoch": 0.8338350391330523,
      "grad_norm": 1.2503742943128184,
      "learning_rate": 8.187133458481416e-07,
      "loss": 0.5285,
      "step": 1385
    },
    {
      "epoch": 0.8368452739313667,
      "grad_norm": 1.2943224950483547,
      "learning_rate": 7.901188117399817e-07,
      "loss": 0.5146,
      "step": 1390
    },
    {
      "epoch": 0.839855508729681,
      "grad_norm": 1.2465983369075755,
      "learning_rate": 7.619896540767435e-07,
      "loss": 0.5214,
      "step": 1395
    },
    {
      "epoch": 0.8428657435279951,
      "grad_norm": 1.2931198121674863,
      "learning_rate": 7.343289823636168e-07,
      "loss": 0.5103,
      "step": 1400
    },
    {
      "epoch": 0.8458759783263095,
      "grad_norm": 1.3126713125625864,
      "learning_rate": 7.0713985431755e-07,
      "loss": 0.5131,
      "step": 1405
    },
    {
      "epoch": 0.8488862131246238,
      "grad_norm": 1.2514737845776198,
      "learning_rate": 6.804252755292429e-07,
      "loss": 0.4972,
      "step": 1410
    },
    {
      "epoch": 0.851896447922938,
      "grad_norm": 1.2888999488065467,
      "learning_rate": 6.541881991309013e-07,
      "loss": 0.5086,
      "step": 1415
    },
    {
      "epoch": 0.8549066827212523,
      "grad_norm": 1.2929120768720084,
      "learning_rate": 6.284315254697726e-07,
      "loss": 0.5131,
      "step": 1420
    },
    {
      "epoch": 0.8579169175195666,
      "grad_norm": 1.2348017732169416,
      "learning_rate": 6.031581017875482e-07,
      "loss": 0.5147,
      "step": 1425
    },
    {
      "epoch": 0.8609271523178808,
      "grad_norm": 1.2685705494636663,
      "learning_rate": 5.783707219056078e-07,
      "loss": 0.5092,
      "step": 1430
    },
    {
      "epoch": 0.863937387116195,
      "grad_norm": 1.2976102586906246,
      "learning_rate": 5.540721259161774e-07,
      "loss": 0.5115,
      "step": 1435
    },
    {
      "epoch": 0.8669476219145094,
      "grad_norm": 1.250337429227757,
      "learning_rate": 5.302649998794368e-07,
      "loss": 0.5169,
      "step": 1440
    },
    {
      "epoch": 0.8699578567128236,
      "grad_norm": 1.2461623782975149,
      "learning_rate": 5.0695197552659e-07,
      "loss": 0.5078,
      "step": 1445
    },
    {
      "epoch": 0.8729680915111379,
      "grad_norm": 1.2782693319086738,
      "learning_rate": 4.841356299689359e-07,
      "loss": 0.5157,
      "step": 1450
    },
    {
      "epoch": 0.8759783263094522,
      "grad_norm": 1.2663091436447071,
      "learning_rate": 4.618184854129981e-07,
      "loss": 0.5094,
      "step": 1455
    },
    {
      "epoch": 0.8789885611077664,
      "grad_norm": 1.2889286164793572,
      "learning_rate": 4.4000300888169753e-07,
      "loss": 0.5097,
      "step": 1460
    },
    {
      "epoch": 0.8819987959060807,
      "grad_norm": 1.270161020673374,
      "learning_rate": 4.1869161194164565e-07,
      "loss": 0.5178,
      "step": 1465
    },
    {
      "epoch": 0.885009030704395,
      "grad_norm": 1.3028978532412478,
      "learning_rate": 3.9788665043656083e-07,
      "loss": 0.5071,
      "step": 1470
    },
    {
      "epoch": 0.8880192655027093,
      "grad_norm": 1.2451090536200642,
      "learning_rate": 3.775904242268391e-07,
      "loss": 0.5161,
      "step": 1475
    },
    {
      "epoch": 0.8910295003010235,
      "grad_norm": 1.3526909015751019,
      "learning_rate": 3.578051769353219e-07,
      "loss": 0.5156,
      "step": 1480
    },
    {
      "epoch": 0.8940397350993378,
      "grad_norm": 1.349791093552624,
      "learning_rate": 3.385330956992816e-07,
      "loss": 0.5163,
      "step": 1485
    },
    {
      "epoch": 0.8970499698976521,
      "grad_norm": 1.274654975609038,
      "learning_rate": 3.1977631092863613e-07,
      "loss": 0.5085,
      "step": 1490
    },
    {
      "epoch": 0.9000602046959663,
      "grad_norm": 1.2723546077606147,
      "learning_rate": 3.015368960704584e-07,
      "loss": 0.4992,
      "step": 1495
    },
    {
      "epoch": 0.9030704394942806,
      "grad_norm": 1.2492243057949244,
      "learning_rate": 2.8381686737975867e-07,
      "loss": 0.5117,
      "step": 1500
    },
    {
      "epoch": 0.9060806742925949,
      "grad_norm": 1.2506347579447048,
      "learning_rate": 2.666181836966053e-07,
      "loss": 0.5061,
      "step": 1505
    },
    {
      "epoch": 0.9090909090909091,
      "grad_norm": 1.2651285126889307,
      "learning_rate": 2.4994274622958726e-07,
      "loss": 0.5117,
      "step": 1510
    },
    {
      "epoch": 0.9121011438892234,
      "grad_norm": 1.2928036851193043,
      "learning_rate": 2.3379239834564526e-07,
      "loss": 0.5052,
      "step": 1515
    },
    {
      "epoch": 0.9151113786875377,
      "grad_norm": 1.327963392569787,
      "learning_rate": 2.1816892536629775e-07,
      "loss": 0.505,
      "step": 1520
    },
    {
      "epoch": 0.9181216134858519,
      "grad_norm": 1.2391568358607692,
      "learning_rate": 2.0307405437029027e-07,
      "loss": 0.4938,
      "step": 1525
    },
    {
      "epoch": 0.9211318482841662,
      "grad_norm": 1.2967086784362822,
      "learning_rate": 1.8850945400266994e-07,
      "loss": 0.5108,
      "step": 1530
    },
    {
      "epoch": 0.9241420830824805,
      "grad_norm": 1.29545735590143,
      "learning_rate": 1.7447673429033361e-07,
      "loss": 0.5032,
      "step": 1535
    },
    {
      "epoch": 0.9271523178807947,
      "grad_norm": 1.2875158001193772,
      "learning_rate": 1.6097744646404457e-07,
      "loss": 0.5107,
      "step": 1540
    },
    {
      "epoch": 0.930162552679109,
      "grad_norm": 1.266148830144139,
      "learning_rate": 1.4801308278695636e-07,
      "loss": 0.5074,
      "step": 1545
    },
    {
      "epoch": 0.9331727874774233,
      "grad_norm": 1.3306638459879772,
      "learning_rate": 1.3558507638965158e-07,
      "loss": 0.507,
      "step": 1550
    },
    {
      "epoch": 0.9361830222757375,
      "grad_norm": 1.333069277227204,
      "learning_rate": 1.2369480111171784e-07,
      "loss": 0.5015,
      "step": 1555
    },
    {
      "epoch": 0.9391932570740518,
      "grad_norm": 1.3040224501040143,
      "learning_rate": 1.1234357134987717e-07,
      "loss": 0.498,
      "step": 1560
    },
    {
      "epoch": 0.9422034918723661,
      "grad_norm": 1.2723009240100882,
      "learning_rate": 1.0153264191269052e-07,
      "loss": 0.522,
      "step": 1565
    },
    {
      "epoch": 0.9452137266706803,
      "grad_norm": 1.2733119108501816,
      "learning_rate": 9.126320788184374e-08,
      "loss": 0.5084,
      "step": 1570
    },
    {
      "epoch": 0.9482239614689946,
      "grad_norm": 1.2479278083881131,
      "learning_rate": 8.153640448003875e-08,
      "loss": 0.4962,
      "step": 1575
    },
    {
      "epoch": 0.9512341962673089,
      "grad_norm": 1.2959385578328415,
      "learning_rate": 7.235330694550402e-08,
      "loss": 0.5114,
      "step": 1580
    },
    {
      "epoch": 0.9542444310656231,
      "grad_norm": 1.231634916892265,
      "learning_rate": 6.371493041313126e-08,
      "loss": 0.4951,
      "step": 1585
    },
    {
      "epoch": 0.9572546658639374,
      "grad_norm": 1.27585215530384,
      "learning_rate": 5.562222980225907e-08,
      "loss": 0.5071,
      "step": 1590
    },
    {
      "epoch": 0.9602649006622517,
      "grad_norm": 1.225825933770062,
      "learning_rate": 4.807609971111238e-08,
      "loss": 0.4971,
      "step": 1595
    },
    {
      "epoch": 0.963275135460566,
      "grad_norm": 1.2453682011592957,
      "learning_rate": 4.107737431791159e-08,
      "loss": 0.5054,
      "step": 1600
    },
    {
      "epoch": 0.9662853702588802,
      "grad_norm": 1.2857076641073053,
      "learning_rate": 3.462682728865685e-08,
      "loss": 0.5043,
      "step": 1605
    },
    {
      "epoch": 0.9692956050571945,
      "grad_norm": 1.2778563737126165,
      "learning_rate": 2.8725171691605934e-08,
      "loss": 0.5075,
      "step": 1610
    },
    {
      "epoch": 0.9723058398555088,
      "grad_norm": 1.2554905316985445,
      "learning_rate": 2.3373059918448958e-08,
      "loss": 0.4938,
      "step": 1615
    },
    {
      "epoch": 0.975316074653823,
      "grad_norm": 1.3192898914729378,
      "learning_rate": 1.8571083612188845e-08,
      "loss": 0.5088,
      "step": 1620
    },
    {
      "epoch": 0.9783263094521373,
      "grad_norm": 1.2731885499582947,
      "learning_rate": 1.431977360173975e-08,
      "loss": 0.4969,
      "step": 1625
    },
    {
      "epoch": 0.9813365442504516,
      "grad_norm": 1.304095478961953,
      "learning_rate": 1.0619599843249006e-08,
      "loss": 0.5139,
      "step": 1630
    },
    {
      "epoch": 0.9843467790487658,
      "grad_norm": 1.27242623092078,
      "learning_rate": 7.470971368142011e-09,
      "loss": 0.5156,
      "step": 1635
    },
    {
      "epoch": 0.9873570138470801,
      "grad_norm": 1.239577403155516,
      "learning_rate": 4.874236237911723e-09,
      "loss": 0.5042,
      "step": 1640
    },
    {
      "epoch": 0.9903672486453944,
      "grad_norm": 1.2559422923667112,
      "learning_rate": 2.8296815056377824e-09,
      "loss": 0.5124,
      "step": 1645
    },
    {
      "epoch": 0.9933774834437086,
      "grad_norm": 1.2978012230245286,
      "learning_rate": 1.3375331842574446e-09,
      "loss": 0.5151,
      "step": 1650
    },
    {
      "epoch": 0.9963877182420229,
      "grad_norm": 1.2286869775237814,
      "learning_rate": 3.9795622158111945e-10,
      "loss": 0.5093,
      "step": 1655
    },
    {
      "epoch": 0.9993979530403372,
      "grad_norm": 1.2619068290294162,
      "learning_rate": 1.1054482056405136e-11,
      "loss": 0.5114,
      "step": 1660
    },
    {
      "epoch": 1.0,
      "eval_runtime": 3.8298,
      "eval_samples_per_second": 2.611,
      "eval_steps_per_second": 0.783,
      "step": 1661
    },
    {
      "epoch": 1.0,
      "step": 1661,
      "total_flos": 430711668473856.0,
      "train_loss": 0.669738974236064,
      "train_runtime": 16716.2181,
      "train_samples_per_second": 1.59,
      "train_steps_per_second": 0.099
    }
  ],
  "logging_steps": 5,
  "max_steps": 1661,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 430711668473856.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}