{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 939,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.003194888178913738,
      "grad_norm": 6.409714948226888,
      "learning_rate": 1.0638297872340426e-07,
      "loss": 0.9383,
      "step": 1
    },
    {
      "epoch": 0.006389776357827476,
      "grad_norm": 5.882820413352701,
      "learning_rate": 2.1276595744680852e-07,
      "loss": 0.8501,
      "step": 2
    },
    {
      "epoch": 0.009584664536741214,
      "grad_norm": 5.957247801225133,
      "learning_rate": 3.1914893617021275e-07,
      "loss": 0.8875,
      "step": 3
    },
    {
      "epoch": 0.012779552715654952,
      "grad_norm": 5.724998203884164,
      "learning_rate": 4.2553191489361704e-07,
      "loss": 0.8129,
      "step": 4
    },
    {
      "epoch": 0.01597444089456869,
      "grad_norm": 6.23206077824922,
      "learning_rate": 5.319148936170213e-07,
      "loss": 0.8894,
      "step": 5
    },
    {
      "epoch": 0.019169329073482427,
      "grad_norm": 6.15372248339717,
      "learning_rate": 6.382978723404255e-07,
      "loss": 0.8576,
      "step": 6
    },
    {
      "epoch": 0.022364217252396165,
      "grad_norm": 6.07845232983042,
      "learning_rate": 7.446808510638298e-07,
      "loss": 0.8891,
      "step": 7
    },
    {
      "epoch": 0.025559105431309903,
      "grad_norm": 5.63391011618048,
      "learning_rate": 8.510638297872341e-07,
      "loss": 0.8551,
      "step": 8
    },
    {
      "epoch": 0.02875399361022364,
      "grad_norm": 5.534522933447544,
      "learning_rate": 9.574468085106384e-07,
      "loss": 0.8428,
      "step": 9
    },
    {
      "epoch": 0.03194888178913738,
      "grad_norm": 5.418239884527311,
      "learning_rate": 1.0638297872340427e-06,
      "loss": 0.8059,
      "step": 10
    },
    {
      "epoch": 0.03514376996805112,
      "grad_norm": 4.9589364905240805,
      "learning_rate": 1.170212765957447e-06,
      "loss": 0.8492,
      "step": 11
    },
    {
      "epoch": 0.038338658146964855,
      "grad_norm": 4.799766698395383,
      "learning_rate": 1.276595744680851e-06,
      "loss": 0.879,
      "step": 12
    },
    {
      "epoch": 0.04153354632587859,
      "grad_norm": 4.519154133051823,
      "learning_rate": 1.3829787234042555e-06,
      "loss": 0.824,
      "step": 13
    },
    {
      "epoch": 0.04472843450479233,
      "grad_norm": 2.836911275221638,
      "learning_rate": 1.4893617021276596e-06,
      "loss": 0.7959,
      "step": 14
    },
    {
      "epoch": 0.04792332268370607,
      "grad_norm": 2.436716671685153,
      "learning_rate": 1.595744680851064e-06,
      "loss": 0.7458,
      "step": 15
    },
    {
      "epoch": 0.051118210862619806,
      "grad_norm": 2.5306910044508966,
      "learning_rate": 1.7021276595744682e-06,
      "loss": 0.7691,
      "step": 16
    },
    {
      "epoch": 0.054313099041533544,
      "grad_norm": 2.223202815090778,
      "learning_rate": 1.8085106382978727e-06,
      "loss": 0.7256,
      "step": 17
    },
    {
      "epoch": 0.05750798722044728,
      "grad_norm": 2.0892129964533477,
      "learning_rate": 1.9148936170212767e-06,
      "loss": 0.7825,
      "step": 18
    },
    {
      "epoch": 0.06070287539936102,
      "grad_norm": 2.4490319759010633,
      "learning_rate": 2.021276595744681e-06,
      "loss": 0.7727,
      "step": 19
    },
    {
      "epoch": 0.06389776357827476,
      "grad_norm": 2.8593345098103855,
      "learning_rate": 2.1276595744680853e-06,
      "loss": 0.7154,
      "step": 20
    },
    {
      "epoch": 0.0670926517571885,
      "grad_norm": 2.9984640074592877,
      "learning_rate": 2.2340425531914894e-06,
      "loss": 0.7033,
      "step": 21
    },
    {
      "epoch": 0.07028753993610223,
      "grad_norm": 2.81387823783635,
      "learning_rate": 2.340425531914894e-06,
      "loss": 0.6523,
      "step": 22
    },
    {
      "epoch": 0.07348242811501597,
      "grad_norm": 3.018564967672109,
      "learning_rate": 2.446808510638298e-06,
      "loss": 0.7746,
      "step": 23
    },
    {
      "epoch": 0.07667731629392971,
      "grad_norm": 2.856610406420041,
      "learning_rate": 2.553191489361702e-06,
      "loss": 0.7548,
      "step": 24
    },
    {
      "epoch": 0.07987220447284345,
      "grad_norm": 2.331383834536258,
      "learning_rate": 2.6595744680851065e-06,
      "loss": 0.6684,
      "step": 25
    },
    {
      "epoch": 0.08306709265175719,
      "grad_norm": 2.1989383298002485,
      "learning_rate": 2.765957446808511e-06,
      "loss": 0.7405,
      "step": 26
    },
    {
      "epoch": 0.08626198083067092,
      "grad_norm": 1.5941806085953258,
      "learning_rate": 2.8723404255319155e-06,
      "loss": 0.6736,
      "step": 27
    },
    {
      "epoch": 0.08945686900958466,
      "grad_norm": 1.5505780124124287,
      "learning_rate": 2.978723404255319e-06,
      "loss": 0.6973,
      "step": 28
    },
    {
      "epoch": 0.0926517571884984,
      "grad_norm": 1.4789842434436071,
      "learning_rate": 3.0851063829787237e-06,
      "loss": 0.7071,
      "step": 29
    },
    {
      "epoch": 0.09584664536741214,
      "grad_norm": 1.3346837979890005,
      "learning_rate": 3.191489361702128e-06,
      "loss": 0.6911,
      "step": 30
    },
    {
      "epoch": 0.09904153354632587,
      "grad_norm": 1.4475279708550588,
      "learning_rate": 3.297872340425532e-06,
      "loss": 0.6605,
      "step": 31
    },
    {
      "epoch": 0.10223642172523961,
      "grad_norm": 1.5471942641165661,
      "learning_rate": 3.4042553191489363e-06,
      "loss": 0.6891,
      "step": 32
    },
    {
      "epoch": 0.10543130990415335,
      "grad_norm": 1.2336120130098014,
      "learning_rate": 3.510638297872341e-06,
      "loss": 0.6334,
      "step": 33
    },
    {
      "epoch": 0.10862619808306709,
      "grad_norm": 1.2579672960093606,
      "learning_rate": 3.6170212765957453e-06,
      "loss": 0.6557,
      "step": 34
    },
    {
      "epoch": 0.11182108626198083,
      "grad_norm": 1.2231859178637319,
      "learning_rate": 3.723404255319149e-06,
      "loss": 0.6491,
      "step": 35
    },
    {
      "epoch": 0.11501597444089456,
      "grad_norm": 1.1385946023643831,
      "learning_rate": 3.8297872340425535e-06,
      "loss": 0.5842,
      "step": 36
    },
    {
      "epoch": 0.1182108626198083,
      "grad_norm": 0.9165940060653626,
      "learning_rate": 3.936170212765958e-06,
      "loss": 0.6432,
      "step": 37
    },
    {
      "epoch": 0.12140575079872204,
      "grad_norm": 1.0311853588560047,
      "learning_rate": 4.042553191489362e-06,
      "loss": 0.6392,
      "step": 38
    },
    {
      "epoch": 0.12460063897763578,
      "grad_norm": 1.1972768617611755,
      "learning_rate": 4.148936170212766e-06,
      "loss": 0.6042,
      "step": 39
    },
    {
      "epoch": 0.12779552715654952,
      "grad_norm": 1.1467995823095627,
      "learning_rate": 4.255319148936171e-06,
      "loss": 0.5756,
      "step": 40
    },
    {
      "epoch": 0.13099041533546327,
      "grad_norm": 1.2232522924355278,
      "learning_rate": 4.361702127659575e-06,
      "loss": 0.6,
      "step": 41
    },
    {
      "epoch": 0.134185303514377,
      "grad_norm": 0.9926550062713587,
      "learning_rate": 4.468085106382979e-06,
      "loss": 0.6201,
      "step": 42
    },
    {
      "epoch": 0.13738019169329074,
      "grad_norm": 0.9837243414486642,
      "learning_rate": 4.574468085106383e-06,
      "loss": 0.6119,
      "step": 43
    },
    {
      "epoch": 0.14057507987220447,
      "grad_norm": 0.7421864380116049,
      "learning_rate": 4.680851063829788e-06,
      "loss": 0.5856,
      "step": 44
    },
    {
      "epoch": 0.14376996805111822,
      "grad_norm": 0.9035767642840998,
      "learning_rate": 4.787234042553192e-06,
      "loss": 0.6272,
      "step": 45
    },
    {
      "epoch": 0.14696485623003194,
      "grad_norm": 1.0213511082377889,
      "learning_rate": 4.893617021276596e-06,
      "loss": 0.5995,
      "step": 46
    },
    {
      "epoch": 0.1501597444089457,
      "grad_norm": 0.8830146417372309,
      "learning_rate": 5e-06,
      "loss": 0.6345,
      "step": 47
    },
    {
      "epoch": 0.15335463258785942,
      "grad_norm": 0.791319537107259,
      "learning_rate": 5.106382978723404e-06,
      "loss": 0.6101,
      "step": 48
    },
    {
      "epoch": 0.15654952076677317,
      "grad_norm": 0.8284190199994919,
      "learning_rate": 5.212765957446809e-06,
      "loss": 0.6044,
      "step": 49
    },
    {
      "epoch": 0.1597444089456869,
      "grad_norm": 0.9510398823607971,
      "learning_rate": 5.319148936170213e-06,
      "loss": 0.5714,
      "step": 50
    },
    {
      "epoch": 0.16293929712460065,
      "grad_norm": 0.9998952434573138,
      "learning_rate": 5.425531914893617e-06,
      "loss": 0.6129,
      "step": 51
    },
    {
      "epoch": 0.16613418530351437,
      "grad_norm": 0.9860559729186523,
      "learning_rate": 5.531914893617022e-06,
      "loss": 0.6297,
      "step": 52
    },
    {
      "epoch": 0.16932907348242812,
      "grad_norm": 0.9463323345389851,
      "learning_rate": 5.638297872340426e-06,
      "loss": 0.5753,
      "step": 53
    },
    {
      "epoch": 0.17252396166134185,
      "grad_norm": 0.8257562888308732,
      "learning_rate": 5.744680851063831e-06,
      "loss": 0.5689,
      "step": 54
    },
    {
      "epoch": 0.1757188498402556,
      "grad_norm": 0.9121399111640456,
      "learning_rate": 5.851063829787235e-06,
      "loss": 0.5864,
      "step": 55
    },
    {
      "epoch": 0.17891373801916932,
      "grad_norm": 0.7597161760676522,
      "learning_rate": 5.957446808510638e-06,
      "loss": 0.549,
      "step": 56
    },
    {
      "epoch": 0.18210862619808307,
      "grad_norm": 1.0158634395768724,
      "learning_rate": 6.063829787234044e-06,
      "loss": 0.5829,
      "step": 57
    },
    {
      "epoch": 0.1853035143769968,
      "grad_norm": 0.8684762573966118,
      "learning_rate": 6.170212765957447e-06,
      "loss": 0.5608,
      "step": 58
    },
    {
      "epoch": 0.18849840255591055,
      "grad_norm": 0.8595474370965593,
      "learning_rate": 6.276595744680851e-06,
      "loss": 0.5806,
      "step": 59
    },
    {
      "epoch": 0.19169329073482427,
      "grad_norm": 0.8573534321100813,
      "learning_rate": 6.382978723404256e-06,
      "loss": 0.5736,
      "step": 60
    },
    {
      "epoch": 0.19488817891373802,
      "grad_norm": 0.8739703998606017,
      "learning_rate": 6.48936170212766e-06,
      "loss": 0.5716,
      "step": 61
    },
    {
      "epoch": 0.19808306709265175,
      "grad_norm": 0.8937605093917412,
      "learning_rate": 6.595744680851064e-06,
      "loss": 0.5287,
      "step": 62
    },
    {
      "epoch": 0.2012779552715655,
      "grad_norm": 0.7969931244942438,
      "learning_rate": 6.702127659574469e-06,
      "loss": 0.5664,
      "step": 63
    },
    {
      "epoch": 0.20447284345047922,
      "grad_norm": 0.7915012047099632,
      "learning_rate": 6.808510638297873e-06,
      "loss": 0.5622,
      "step": 64
    },
    {
      "epoch": 0.20766773162939298,
      "grad_norm": 0.8806575952893214,
      "learning_rate": 6.914893617021278e-06,
      "loss": 0.5966,
      "step": 65
    },
    {
      "epoch": 0.2108626198083067,
      "grad_norm": 0.973404020541715,
      "learning_rate": 7.021276595744682e-06,
      "loss": 0.5325,
      "step": 66
    },
    {
      "epoch": 0.21405750798722045,
      "grad_norm": 0.8197327493931416,
      "learning_rate": 7.127659574468085e-06,
      "loss": 0.5378,
      "step": 67
    },
    {
      "epoch": 0.21725239616613418,
      "grad_norm": 0.8154232260544038,
      "learning_rate": 7.234042553191491e-06,
      "loss": 0.5294,
      "step": 68
    },
    {
      "epoch": 0.22044728434504793,
      "grad_norm": 0.8048146414091009,
      "learning_rate": 7.340425531914894e-06,
      "loss": 0.5835,
      "step": 69
    },
    {
      "epoch": 0.22364217252396165,
      "grad_norm": 0.9431792825447465,
      "learning_rate": 7.446808510638298e-06,
      "loss": 0.6016,
      "step": 70
    },
    {
      "epoch": 0.2268370607028754,
      "grad_norm": 0.9042800238596417,
      "learning_rate": 7.553191489361703e-06,
      "loss": 0.5148,
      "step": 71
    },
    {
      "epoch": 0.23003194888178913,
      "grad_norm": 0.7370561189551593,
      "learning_rate": 7.659574468085107e-06,
      "loss": 0.5407,
      "step": 72
    },
    {
      "epoch": 0.23322683706070288,
      "grad_norm": 0.8525656922874479,
      "learning_rate": 7.765957446808511e-06,
      "loss": 0.541,
      "step": 73
    },
    {
      "epoch": 0.2364217252396166,
      "grad_norm": 0.8221124716437727,
      "learning_rate": 7.872340425531916e-06,
      "loss": 0.5215,
      "step": 74
    },
    {
      "epoch": 0.23961661341853036,
      "grad_norm": 0.777647704665899,
      "learning_rate": 7.97872340425532e-06,
      "loss": 0.5259,
      "step": 75
    },
    {
      "epoch": 0.24281150159744408,
      "grad_norm": 0.7599909140676149,
      "learning_rate": 8.085106382978723e-06,
      "loss": 0.5485,
      "step": 76
    },
    {
      "epoch": 0.24600638977635783,
      "grad_norm": 0.685227020557745,
      "learning_rate": 8.191489361702128e-06,
      "loss": 0.498,
      "step": 77
    },
    {
      "epoch": 0.24920127795527156,
      "grad_norm": 0.8431319429416056,
      "learning_rate": 8.297872340425532e-06,
      "loss": 0.5384,
      "step": 78
    },
    {
      "epoch": 0.2523961661341853,
      "grad_norm": 0.8795817172427665,
      "learning_rate": 8.404255319148937e-06,
      "loss": 0.5514,
      "step": 79
    },
    {
      "epoch": 0.25559105431309903,
      "grad_norm": 0.8078101765493096,
      "learning_rate": 8.510638297872341e-06,
      "loss": 0.5093,
      "step": 80
    },
    {
      "epoch": 0.25878594249201275,
      "grad_norm": 0.7660885386250705,
      "learning_rate": 8.617021276595746e-06,
      "loss": 0.5594,
      "step": 81
    },
    {
      "epoch": 0.26198083067092653,
      "grad_norm": 0.8652722799288994,
      "learning_rate": 8.72340425531915e-06,
      "loss": 0.5729,
      "step": 82
    },
    {
      "epoch": 0.26517571884984026,
      "grad_norm": 0.8601708327765593,
      "learning_rate": 8.829787234042555e-06,
      "loss": 0.5233,
      "step": 83
    },
    {
      "epoch": 0.268370607028754,
      "grad_norm": 0.8309693856068712,
      "learning_rate": 8.936170212765958e-06,
      "loss": 0.4808,
      "step": 84
    },
    {
      "epoch": 0.2715654952076677,
      "grad_norm": 0.9088041717182157,
      "learning_rate": 9.042553191489362e-06,
      "loss": 0.5767,
      "step": 85
    },
    {
      "epoch": 0.2747603833865815,
      "grad_norm": 0.908456866251118,
      "learning_rate": 9.148936170212767e-06,
      "loss": 0.5543,
      "step": 86
    },
    {
      "epoch": 0.2779552715654952,
      "grad_norm": 0.9219476257453414,
      "learning_rate": 9.255319148936171e-06,
      "loss": 0.5555,
      "step": 87
    },
    {
      "epoch": 0.28115015974440893,
      "grad_norm": 0.9639792987268722,
      "learning_rate": 9.361702127659576e-06,
      "loss": 0.5871,
      "step": 88
    },
    {
      "epoch": 0.28434504792332266,
      "grad_norm": 0.9084931124261426,
      "learning_rate": 9.46808510638298e-06,
      "loss": 0.4884,
      "step": 89
    },
    {
      "epoch": 0.28753993610223644,
      "grad_norm": 0.7606675941137307,
      "learning_rate": 9.574468085106385e-06,
      "loss": 0.5232,
      "step": 90
    },
    {
      "epoch": 0.29073482428115016,
      "grad_norm": 1.1309944787796808,
      "learning_rate": 9.680851063829787e-06,
      "loss": 0.5364,
      "step": 91
    },
    {
      "epoch": 0.2939297124600639,
      "grad_norm": 1.0148322322106438,
      "learning_rate": 9.787234042553192e-06,
      "loss": 0.502,
      "step": 92
    },
    {
      "epoch": 0.2971246006389776,
      "grad_norm": 0.8194146060783313,
      "learning_rate": 9.893617021276596e-06,
      "loss": 0.5379,
      "step": 93
    },
    {
      "epoch": 0.3003194888178914,
      "grad_norm": 0.9444330231028176,
      "learning_rate": 1e-05,
      "loss": 0.5857,
      "step": 94
    },
    {
      "epoch": 0.3035143769968051,
      "grad_norm": 1.0778206337140743,
      "learning_rate": 9.999965443811378e-06,
      "loss": 0.5236,
      "step": 95
    },
    {
      "epoch": 0.30670926517571884,
      "grad_norm": 1.0986290906153802,
      "learning_rate": 9.999861775723162e-06,
      "loss": 0.5526,
      "step": 96
    },
    {
      "epoch": 0.30990415335463256,
      "grad_norm": 0.9078029833378195,
      "learning_rate": 9.999688997168301e-06,
      "loss": 0.518,
      "step": 97
    },
    {
      "epoch": 0.31309904153354634,
      "grad_norm": 1.002162501813825,
      "learning_rate": 9.999447110535026e-06,
      "loss": 0.5433,
      "step": 98
    },
    {
      "epoch": 0.31629392971246006,
      "grad_norm": 1.0679692282792004,
      "learning_rate": 9.999136119166803e-06,
      "loss": 0.5233,
      "step": 99
    },
    {
      "epoch": 0.3194888178913738,
      "grad_norm": 0.9013455224092123,
      "learning_rate": 9.998756027362308e-06,
      "loss": 0.5787,
      "step": 100
    },
    {
      "epoch": 0.3226837060702875,
      "grad_norm": 0.9715869077223206,
      "learning_rate": 9.99830684037535e-06,
      "loss": 0.5398,
      "step": 101
    },
    {
      "epoch": 0.3258785942492013,
      "grad_norm": 1.0246249595859571,
      "learning_rate": 9.9977885644148e-06,
      "loss": 0.5612,
      "step": 102
    },
    {
      "epoch": 0.329073482428115,
      "grad_norm": 0.9640732938631273,
      "learning_rate": 9.997201206644522e-06,
      "loss": 0.506,
      "step": 103
    },
    {
      "epoch": 0.33226837060702874,
      "grad_norm": 0.8481753728957393,
      "learning_rate": 9.99654477518325e-06,
      "loss": 0.5465,
      "step": 104
    },
    {
      "epoch": 0.3354632587859425,
      "grad_norm": 1.0785966848337503,
      "learning_rate": 9.995819279104494e-06,
      "loss": 0.5071,
      "step": 105
    },
    {
      "epoch": 0.33865814696485624,
      "grad_norm": 0.9621589109623367,
      "learning_rate": 9.995024728436402e-06,
      "loss": 0.5008,
      "step": 106
    },
    {
      "epoch": 0.34185303514376997,
      "grad_norm": 0.9742254294374358,
      "learning_rate": 9.994161134161635e-06,
      "loss": 0.5627,
      "step": 107
    },
    {
      "epoch": 0.3450479233226837,
      "grad_norm": 0.9339518708258924,
      "learning_rate": 9.993228508217201e-06,
      "loss": 0.5221,
      "step": 108
    },
    {
      "epoch": 0.34824281150159747,
      "grad_norm": 0.8352060269189646,
      "learning_rate": 9.9922268634943e-06,
      "loss": 0.5327,
      "step": 109
    },
    {
      "epoch": 0.3514376996805112,
      "grad_norm": 0.9672282064745698,
      "learning_rate": 9.991156213838143e-06,
      "loss": 0.5285,
      "step": 110
    },
    {
      "epoch": 0.3546325878594249,
      "grad_norm": 0.9928070546447347,
      "learning_rate": 9.990016574047757e-06,
      "loss": 0.6163,
      "step": 111
    },
    {
      "epoch": 0.35782747603833864,
      "grad_norm": 0.980621046187048,
      "learning_rate": 9.988807959875785e-06,
      "loss": 0.5783,
      "step": 112
    },
    {
      "epoch": 0.3610223642172524,
      "grad_norm": 0.9698153125433151,
      "learning_rate": 9.987530388028269e-06,
      "loss": 0.5686,
      "step": 113
    },
    {
      "epoch": 0.36421725239616615,
      "grad_norm": 0.9660154212525479,
      "learning_rate": 9.986183876164412e-06,
      "loss": 0.6109,
      "step": 114
    },
    {
      "epoch": 0.36741214057507987,
      "grad_norm": 0.9000420730099195,
      "learning_rate": 9.984768442896342e-06,
      "loss": 0.4905,
      "step": 115
    },
    {
      "epoch": 0.3706070287539936,
      "grad_norm": 0.8723111757350182,
      "learning_rate": 9.983284107788852e-06,
      "loss": 0.5219,
      "step": 116
    },
    {
      "epoch": 0.3738019169329074,
      "grad_norm": 0.8504971094012811,
      "learning_rate": 9.981730891359123e-06,
      "loss": 0.4976,
      "step": 117
    },
    {
      "epoch": 0.3769968051118211,
      "grad_norm": 0.83454729499801,
      "learning_rate": 9.980108815076456e-06,
      "loss": 0.518,
      "step": 118
    },
    {
      "epoch": 0.3801916932907348,
      "grad_norm": 0.8907660506902001,
      "learning_rate": 9.978417901361958e-06,
      "loss": 0.5111,
      "step": 119
    },
    {
      "epoch": 0.38338658146964855,
      "grad_norm": 0.8962456117326467,
      "learning_rate": 9.976658173588244e-06,
      "loss": 0.5197,
      "step": 120
    },
    {
      "epoch": 0.3865814696485623,
      "grad_norm": 0.8723572110377498,
      "learning_rate": 9.974829656079106e-06,
      "loss": 0.5394,
      "step": 121
    },
    {
      "epoch": 0.38977635782747605,
      "grad_norm": 1.0284398448699272,
      "learning_rate": 9.972932374109184e-06,
      "loss": 0.5117,
      "step": 122
    },
    {
      "epoch": 0.3929712460063898,
      "grad_norm": 0.831752579591158,
      "learning_rate": 9.97096635390361e-06,
      "loss": 0.5527,
      "step": 123
    },
    {
      "epoch": 0.3961661341853035,
      "grad_norm": 0.8332127211329551,
      "learning_rate": 9.968931622637652e-06,
      "loss": 0.5466,
      "step": 124
    },
    {
      "epoch": 0.3993610223642173,
      "grad_norm": 0.9889873759146678,
      "learning_rate": 9.966828208436332e-06,
      "loss": 0.5412,
      "step": 125
    },
    {
      "epoch": 0.402555910543131,
      "grad_norm": 0.9280680742855908,
      "learning_rate": 9.96465614037404e-06,
      "loss": 0.602,
      "step": 126
    },
    {
      "epoch": 0.4057507987220447,
      "grad_norm": 0.8293097370353433,
      "learning_rate": 9.962415448474134e-06,
      "loss": 0.5469,
      "step": 127
    },
    {
      "epoch": 0.40894568690095845,
      "grad_norm": 0.7617699546730983,
      "learning_rate": 9.960106163708522e-06,
      "loss": 0.5003,
      "step": 128
    },
    {
      "epoch": 0.41214057507987223,
      "grad_norm": 0.9666132086437509,
      "learning_rate": 9.95772831799724e-06,
      "loss": 0.5305,
      "step": 129
    },
    {
      "epoch": 0.41533546325878595,
      "grad_norm": 0.8515196546140202,
      "learning_rate": 9.955281944207998e-06,
      "loss": 0.5667,
      "step": 130
    },
    {
      "epoch": 0.4185303514376997,
      "grad_norm": 0.8268237405590374,
      "learning_rate": 9.95276707615574e-06,
      "loss": 0.5512,
      "step": 131
    },
    {
      "epoch": 0.4217252396166134,
      "grad_norm": 0.8399302374244939,
      "learning_rate": 9.950183748602164e-06,
      "loss": 0.5074,
      "step": 132
    },
    {
      "epoch": 0.4249201277955272,
      "grad_norm": 0.798796366583729,
      "learning_rate": 9.947531997255256e-06,
      "loss": 0.5252,
      "step": 133
    },
    {
      "epoch": 0.4281150159744409,
      "grad_norm": 0.8461211398114474,
      "learning_rate": 9.944811858768782e-06,
      "loss": 0.5889,
      "step": 134
    },
    {
      "epoch": 0.43130990415335463,
      "grad_norm": 0.8645072380749989,
      "learning_rate": 9.94202337074179e-06,
      "loss": 0.532,
      "step": 135
    },
    {
      "epoch": 0.43450479233226835,
      "grad_norm": 0.7905196447402825,
      "learning_rate": 9.939166571718086e-06,
      "loss": 0.5234,
      "step": 136
    },
    {
      "epoch": 0.43769968051118213,
      "grad_norm": 0.8422357866045145,
      "learning_rate": 9.936241501185706e-06,
      "loss": 0.5283,
      "step": 137
    },
    {
      "epoch": 0.44089456869009586,
      "grad_norm": 0.9541587953845362,
      "learning_rate": 9.933248199576366e-06,
      "loss": 0.5404,
      "step": 138
    },
    {
      "epoch": 0.4440894568690096,
      "grad_norm": 0.8496124061447565,
      "learning_rate": 9.930186708264902e-06,
      "loss": 0.5509,
      "step": 139
    },
    {
      "epoch": 0.4472843450479233,
      "grad_norm": 0.9820024145545019,
      "learning_rate": 9.927057069568704e-06,
      "loss": 0.5155,
      "step": 140
    },
    {
      "epoch": 0.4504792332268371,
      "grad_norm": 0.8320575514975224,
      "learning_rate": 9.923859326747125e-06,
      "loss": 0.5829,
      "step": 141
    },
    {
      "epoch": 0.4536741214057508,
      "grad_norm": 0.7831612160017987,
      "learning_rate": 9.920593524000887e-06,
      "loss": 0.52,
      "step": 142
    },
    {
      "epoch": 0.45686900958466453,
      "grad_norm": 0.840014154368057,
      "learning_rate": 9.917259706471469e-06,
      "loss": 0.5283,
      "step": 143
    },
    {
      "epoch": 0.46006389776357826,
      "grad_norm": 0.9362051002065757,
      "learning_rate": 9.913857920240481e-06,
      "loss": 0.5155,
      "step": 144
    },
    {
      "epoch": 0.46325878594249204,
      "grad_norm": 0.8512954379893516,
      "learning_rate": 9.91038821232903e-06,
      "loss": 0.5403,
      "step": 145
    },
    {
      "epoch": 0.46645367412140576,
      "grad_norm": 0.7839424808363111,
      "learning_rate": 9.906850630697068e-06,
      "loss": 0.5234,
      "step": 146
    },
    {
      "epoch": 0.4696485623003195,
      "grad_norm": 0.8648328590724287,
      "learning_rate": 9.903245224242732e-06,
      "loss": 0.5187,
      "step": 147
    },
    {
      "epoch": 0.4728434504792332,
      "grad_norm": 0.8792377942973435,
      "learning_rate": 9.899572042801662e-06,
      "loss": 0.5577,
      "step": 148
    },
    {
      "epoch": 0.476038338658147,
      "grad_norm": 0.8925497509434575,
      "learning_rate": 9.895831137146319e-06,
      "loss": 0.5622,
      "step": 149
    },
    {
      "epoch": 0.4792332268370607,
      "grad_norm": 0.8150179359015267,
      "learning_rate": 9.89202255898528e-06,
      "loss": 0.4704,
      "step": 150
    },
    {
      "epoch": 0.48242811501597443,
      "grad_norm": 0.9444671033438701,
      "learning_rate": 9.888146360962523e-06,
      "loss": 0.5426,
      "step": 151
    },
    {
      "epoch": 0.48562300319488816,
      "grad_norm": 0.8751489294460165,
      "learning_rate": 9.8842025966567e-06,
      "loss": 0.5051,
      "step": 152
    },
    {
      "epoch": 0.48881789137380194,
      "grad_norm": 0.8540535350580652,
      "learning_rate": 9.880191320580396e-06,
      "loss": 0.5203,
      "step": 153
    },
    {
      "epoch": 0.49201277955271566,
      "grad_norm": 0.9280441125692708,
      "learning_rate": 9.876112588179378e-06,
      "loss": 0.5137,
      "step": 154
    },
    {
      "epoch": 0.4952076677316294,
      "grad_norm": 0.8596304256499511,
      "learning_rate": 9.87196645583182e-06,
      "loss": 0.5207,
      "step": 155
    },
    {
      "epoch": 0.4984025559105431,
      "grad_norm": 0.9988776485113224,
      "learning_rate": 9.86775298084754e-06,
      "loss": 0.5505,
      "step": 156
    },
    {
      "epoch": 0.5015974440894568,
      "grad_norm": 0.8471613181896762,
      "learning_rate": 9.863472221467189e-06,
      "loss": 0.474,
      "step": 157
    },
    {
      "epoch": 0.5047923322683706,
      "grad_norm": 0.9539587230629798,
      "learning_rate": 9.85912423686146e-06,
      "loss": 0.4911,
      "step": 158
    },
    {
      "epoch": 0.5079872204472844,
      "grad_norm": 0.8978617942444193,
      "learning_rate": 9.854709087130261e-06,
      "loss": 0.5203,
      "step": 159
    },
    {
      "epoch": 0.5111821086261981,
      "grad_norm": 0.8714417484517785,
      "learning_rate": 9.850226833301893e-06,
      "loss": 0.5028,
      "step": 160
    },
    {
      "epoch": 0.5143769968051118,
      "grad_norm": 1.1739862547523015,
      "learning_rate": 9.8456775373322e-06,
      "loss": 0.5262,
      "step": 161
    },
    {
      "epoch": 0.5175718849840255,
      "grad_norm": 1.0233461372531267,
      "learning_rate": 9.841061262103713e-06,
      "loss": 0.5206,
      "step": 162
    },
    {
      "epoch": 0.5207667731629393,
      "grad_norm": 0.8883324987583867,
      "learning_rate": 9.836378071424782e-06,
      "loss": 0.5538,
      "step": 163
    },
    {
      "epoch": 0.5239616613418531,
      "grad_norm": 1.0012586097209828,
      "learning_rate": 9.831628030028698e-06,
      "loss": 0.543,
      "step": 164
    },
    {
      "epoch": 0.5271565495207667,
      "grad_norm": 0.9562090852478352,
      "learning_rate": 9.826811203572785e-06,
      "loss": 0.5294,
      "step": 165
    },
    {
      "epoch": 0.5303514376996805,
      "grad_norm": 0.9049046293095923,
      "learning_rate": 9.821927658637518e-06,
      "loss": 0.5156,
      "step": 166
    },
    {
      "epoch": 0.5335463258785943,
      "grad_norm": 0.7877641278063916,
      "learning_rate": 9.81697746272557e-06,
      "loss": 0.4903,
      "step": 167
    },
    {
      "epoch": 0.536741214057508,
      "grad_norm": 0.8443481947780415,
      "learning_rate": 9.811960684260907e-06,
      "loss": 0.4907,
      "step": 168
    },
    {
      "epoch": 0.5399361022364217,
      "grad_norm": 0.898083258200536,
      "learning_rate": 9.80687739258782e-06,
      "loss": 0.5494,
      "step": 169
    },
    {
      "epoch": 0.5431309904153354,
      "grad_norm": 0.8517585039933291,
      "learning_rate": 9.801727657969988e-06,
      "loss": 0.5349,
      "step": 170
    },
    {
      "epoch": 0.5463258785942492,
      "grad_norm": 0.8024396946511465,
      "learning_rate": 9.796511551589492e-06,
      "loss": 0.5363,
      "step": 171
    },
    {
      "epoch": 0.549520766773163,
      "grad_norm": 1.0176737585507962,
      "learning_rate": 9.791229145545832e-06,
      "loss": 0.5323,
      "step": 172
    },
    {
      "epoch": 0.5527156549520766,
      "grad_norm": 0.8838906840771191,
      "learning_rate": 9.785880512854937e-06,
      "loss": 0.542,
      "step": 173
    },
    {
      "epoch": 0.5559105431309904,
      "grad_norm": 0.8473030231523261,
      "learning_rate": 9.78046572744815e-06,
      "loss": 0.5699,
      "step": 174
    },
    {
      "epoch": 0.5591054313099042,
      "grad_norm": 0.825059066819414,
      "learning_rate": 9.77498486417121e-06,
      "loss": 0.518,
      "step": 175
    },
    {
      "epoch": 0.5623003194888179,
      "grad_norm": 0.9674728298434031,
      "learning_rate": 9.769437998783216e-06,
      "loss": 0.5058,
      "step": 176
    },
    {
      "epoch": 0.5654952076677316,
      "grad_norm": 0.8024091577609612,
      "learning_rate": 9.763825207955577e-06,
      "loss": 0.5356,
      "step": 177
    },
    {
      "epoch": 0.5686900958466453,
      "grad_norm": 0.984512919291256,
      "learning_rate": 9.758146569270957e-06,
      "loss": 0.5093,
      "step": 178
    },
    {
      "epoch": 0.5718849840255591,
      "grad_norm": 0.8625469308559135,
      "learning_rate": 9.7524021612222e-06,
      "loss": 0.5041,
      "step": 179
    },
    {
      "epoch": 0.5750798722044729,
      "grad_norm": 0.8997458559279546,
      "learning_rate": 9.746592063211247e-06,
      "loss": 0.5406,
      "step": 180
    },
    {
      "epoch": 0.5782747603833865,
      "grad_norm": 0.8040161888487145,
      "learning_rate": 9.74071635554803e-06,
      "loss": 0.5403,
      "step": 181
    },
    {
      "epoch": 0.5814696485623003,
      "grad_norm": 0.8251026410970733,
      "learning_rate": 9.73477511944938e-06,
      "loss": 0.5193,
      "step": 182
    },
    {
      "epoch": 0.5846645367412141,
      "grad_norm": 0.8831385892825742,
      "learning_rate": 9.728768437037882e-06,
      "loss": 0.5198,
      "step": 183
    },
    {
      "epoch": 0.5878594249201278,
      "grad_norm": 0.770865412532913,
      "learning_rate": 9.722696391340762e-06,
      "loss": 0.5051,
      "step": 184
    },
    {
      "epoch": 0.5910543130990416,
      "grad_norm": 0.8272777118151003,
      "learning_rate": 9.716559066288716e-06,
      "loss": 0.5204,
      "step": 185
    },
    {
      "epoch": 0.5942492012779552,
      "grad_norm": 0.8810836012906247,
      "learning_rate": 9.710356546714774e-06,
      "loss": 0.4819,
      "step": 186
    },
    {
      "epoch": 0.597444089456869,
      "grad_norm": 0.8321988164095505,
      "learning_rate": 9.704088918353108e-06,
      "loss": 0.4975,
      "step": 187
    },
    {
      "epoch": 0.6006389776357828,
      "grad_norm": 0.8350120588975233,
      "learning_rate": 9.697756267837856e-06,
      "loss": 0.506,
      "step": 188
    },
    {
      "epoch": 0.6038338658146964,
      "grad_norm": 0.7780826409389476,
      "learning_rate": 9.691358682701927e-06,
      "loss": 0.4885,
      "step": 189
    },
    {
      "epoch": 0.6070287539936102,
      "grad_norm": 0.8938697808686494,
      "learning_rate": 9.684896251375784e-06,
      "loss": 0.5204,
      "step": 190
    },
    {
      "epoch": 0.610223642172524,
      "grad_norm": 0.8297429232981851,
      "learning_rate": 9.678369063186224e-06,
      "loss": 0.4774,
      "step": 191
    },
    {
      "epoch": 0.6134185303514377,
      "grad_norm": 0.8969345717007828,
      "learning_rate": 9.671777208355146e-06,
      "loss": 0.5183,
      "step": 192
    },
    {
      "epoch": 0.6166134185303515,
      "grad_norm": 0.8605903779349711,
      "learning_rate": 9.665120777998303e-06,
      "loss": 0.5393,
      "step": 193
    },
    {
      "epoch": 0.6198083067092651,
      "grad_norm": 0.8478878853382145,
      "learning_rate": 9.658399864124037e-06,
      "loss": 0.5264,
      "step": 194
    },
    {
      "epoch": 0.6230031948881789,
      "grad_norm": 0.9236215575439594,
      "learning_rate": 9.65161455963202e-06,
      "loss": 0.5184,
      "step": 195
    },
    {
      "epoch": 0.6261980830670927,
      "grad_norm": 0.7325089308594785,
      "learning_rate": 9.64476495831195e-06,
      "loss": 0.5213,
      "step": 196
    },
    {
      "epoch": 0.6293929712460063,
      "grad_norm": 0.8193350984719754,
      "learning_rate": 9.637851154842279e-06,
      "loss": 0.5167,
      "step": 197
    },
    {
      "epoch": 0.6325878594249201,
      "grad_norm": 0.8155853253850004,
      "learning_rate": 9.630873244788884e-06,
      "loss": 0.4814,
      "step": 198
    },
    {
      "epoch": 0.6357827476038339,
      "grad_norm": 0.8266019460516769,
      "learning_rate": 9.623831324603755e-06,
      "loss": 0.487,
      "step": 199
    },
    {
      "epoch": 0.6389776357827476,
      "grad_norm": 1.029547472699715,
      "learning_rate": 9.61672549162366e-06,
      "loss": 0.4843,
      "step": 200
    },
    {
      "epoch": 0.6421725239616614,
      "grad_norm": 0.8453539576168925,
      "learning_rate": 9.6095558440688e-06,
      "loss": 0.5234,
      "step": 201
    },
    {
      "epoch": 0.645367412140575,
      "grad_norm": 0.8910853768115474,
      "learning_rate": 9.602322481041457e-06,
      "loss": 0.535,
      "step": 202
    },
    {
      "epoch": 0.6485623003194888,
      "grad_norm": 1.039608374119123,
      "learning_rate": 9.595025502524609e-06,
      "loss": 0.4762,
      "step": 203
    },
    {
      "epoch": 0.6517571884984026,
      "grad_norm": 0.9738033589237602,
      "learning_rate": 9.587665009380565e-06,
      "loss": 0.5302,
      "step": 204
    },
    {
      "epoch": 0.6549520766773163,
      "grad_norm": 0.9057631459448521,
      "learning_rate": 9.580241103349562e-06,
      "loss": 0.4887,
      "step": 205
    },
    {
      "epoch": 0.65814696485623,
      "grad_norm": 0.9009879245399683,
      "learning_rate": 9.572753887048353e-06,
      "loss": 0.5172,
      "step": 206
    },
    {
      "epoch": 0.6613418530351438,
      "grad_norm": 0.7947928914543034,
      "learning_rate": 9.565203463968808e-06,
      "loss": 0.4967,
      "step": 207
    },
    {
      "epoch": 0.6645367412140575,
      "grad_norm": 0.9387010712796708,
      "learning_rate": 9.557589938476462e-06,
      "loss": 0.5581,
      "step": 208
    },
    {
      "epoch": 0.6677316293929713,
      "grad_norm": 0.8039755278927958,
      "learning_rate": 9.549913415809084e-06,
      "loss": 0.547,
      "step": 209
    },
    {
      "epoch": 0.670926517571885,
      "grad_norm": 0.899702008695738,
      "learning_rate": 9.542174002075221e-06,
      "loss": 0.4954,
      "step": 210
    },
    {
      "epoch": 0.6741214057507987,
      "grad_norm": 0.860505060633573,
      "learning_rate": 9.534371804252727e-06,
      "loss": 0.5166,
      "step": 211
    },
    {
      "epoch": 0.6773162939297125,
      "grad_norm": 0.742092503955616,
      "learning_rate": 9.526506930187294e-06,
      "loss": 0.5348,
      "step": 212
    },
    {
      "epoch": 0.6805111821086262,
      "grad_norm": 0.7823402422263195,
      "learning_rate": 9.518579488590947e-06,
      "loss": 0.5431,
      "step": 213
    },
    {
      "epoch": 0.6837060702875399,
      "grad_norm": 0.9661234253998444,
      "learning_rate": 9.510589589040554e-06,
      "loss": 0.5562,
      "step": 214
    },
    {
      "epoch": 0.6869009584664537,
      "grad_norm": 0.7521161190353953,
      "learning_rate": 9.502537341976305e-06,
      "loss": 0.4647,
      "step": 215
    },
    {
      "epoch": 0.6900958466453674,
      "grad_norm": 0.7987183646840882,
      "learning_rate": 9.494422858700188e-06,
      "loss": 0.5153,
      "step": 216
    },
    {
      "epoch": 0.6932907348242812,
      "grad_norm": 0.9841955582435438,
      "learning_rate": 9.48624625137445e-06,
      "loss": 0.5555,
      "step": 217
    },
    {
      "epoch": 0.6964856230031949,
      "grad_norm": 0.8401707334735183,
      "learning_rate": 9.478007633020043e-06,
      "loss": 0.5471,
      "step": 218
    },
    {
      "epoch": 0.6996805111821086,
      "grad_norm": 0.8710741844589728,
      "learning_rate": 9.469707117515068e-06,
      "loss": 0.5439,
      "step": 219
    },
    {
      "epoch": 0.7028753993610224,
      "grad_norm": 0.8481362485002034,
      "learning_rate": 9.461344819593194e-06,
      "loss": 0.5397,
      "step": 220
    },
    {
      "epoch": 0.7060702875399361,
      "grad_norm": 0.9234317739603707,
      "learning_rate": 9.452920854842085e-06,
      "loss": 0.5103,
      "step": 221
    },
    {
      "epoch": 0.7092651757188498,
      "grad_norm": 0.7849131415351538,
      "learning_rate": 9.44443533970178e-06,
      "loss": 0.5386,
      "step": 222
    },
    {
      "epoch": 0.7124600638977636,
      "grad_norm": 0.7364218991408114,
      "learning_rate": 9.435888391463108e-06,
      "loss": 0.4815,
      "step": 223
    },
    {
      "epoch": 0.7156549520766773,
      "grad_norm": 0.824648899698229,
      "learning_rate": 9.427280128266049e-06,
      "loss": 0.4967,
      "step": 224
    },
    {
      "epoch": 0.7188498402555911,
      "grad_norm": 0.8053371492291931,
      "learning_rate": 9.418610669098114e-06,
      "loss": 0.502,
      "step": 225
    },
    {
      "epoch": 0.7220447284345048,
      "grad_norm": 0.8745575246788325,
      "learning_rate": 9.409880133792684e-06,
      "loss": 0.5471,
      "step": 226
    },
    {
      "epoch": 0.7252396166134185,
      "grad_norm": 0.8426487522968908,
      "learning_rate": 9.40108864302737e-06,
      "loss": 0.5415,
      "step": 227
    },
    {
      "epoch": 0.7284345047923323,
      "grad_norm": 0.8446459909213165,
      "learning_rate": 9.392236318322339e-06,
      "loss": 0.5131,
      "step": 228
    },
    {
      "epoch": 0.731629392971246,
      "grad_norm": 0.8940769463027018,
      "learning_rate": 9.383323282038632e-06,
      "loss": 0.5174,
      "step": 229
    },
    {
      "epoch": 0.7348242811501597,
      "grad_norm": 0.7504679870390856,
      "learning_rate": 9.374349657376473e-06,
      "loss": 0.5025,
      "step": 230
    },
    {
      "epoch": 0.7380191693290735,
      "grad_norm": 0.8843745789113971,
      "learning_rate": 9.365315568373569e-06,
      "loss": 0.5119,
      "step": 231
    },
    {
      "epoch": 0.7412140575079872,
      "grad_norm": 0.7359230595220048,
      "learning_rate": 9.356221139903395e-06,
      "loss": 0.5222,
      "step": 232
    },
    {
      "epoch": 0.744408945686901,
      "grad_norm": 0.8000874826998402,
      "learning_rate": 9.347066497673462e-06,
      "loss": 0.4956,
      "step": 233
    },
    {
      "epoch": 0.7476038338658147,
      "grad_norm": 0.8244411036517779,
      "learning_rate": 9.337851768223589e-06,
      "loss": 0.4948,
      "step": 234
    },
    {
      "epoch": 0.7507987220447284,
      "grad_norm": 0.8355217449150175,
      "learning_rate": 9.328577078924151e-06,
      "loss": 0.5117,
      "step": 235
    },
    {
      "epoch": 0.7539936102236422,
      "grad_norm": 0.7893167216575436,
      "learning_rate": 9.319242557974306e-06,
      "loss": 0.4821,
      "step": 236
    },
    {
      "epoch": 0.7571884984025559,
      "grad_norm": 0.92890577949437,
      "learning_rate": 9.309848334400247e-06,
      "loss": 0.5237,
      "step": 237
    },
    {
      "epoch": 0.7603833865814696,
      "grad_norm": 0.8736286224281079,
      "learning_rate": 9.300394538053395e-06,
      "loss": 0.5017,
      "step": 238
    },
    {
      "epoch": 0.7635782747603834,
      "grad_norm": 0.9077340441102514,
      "learning_rate": 9.29088129960862e-06,
      "loss": 0.5127,
      "step": 239
    },
    {
      "epoch": 0.7667731629392971,
      "grad_norm": 0.6940764153582664,
      "learning_rate": 9.281308750562426e-06,
      "loss": 0.4893,
      "step": 240
    },
    {
      "epoch": 0.7699680511182109,
      "grad_norm": 0.880965679725469,
      "learning_rate": 9.271677023231137e-06,
      "loss": 0.4983,
      "step": 241
    },
    {
      "epoch": 0.7731629392971247,
      "grad_norm": 0.8538265562564804,
      "learning_rate": 9.261986250749068e-06,
      "loss": 0.4765,
      "step": 242
    },
    {
      "epoch": 0.7763578274760383,
      "grad_norm": 0.7630022035581716,
      "learning_rate": 9.252236567066686e-06,
      "loss": 0.4874,
      "step": 243
    },
    {
      "epoch": 0.7795527156549521,
      "grad_norm": 0.8281654773029874,
      "learning_rate": 9.242428106948748e-06,
      "loss": 0.5903,
      "step": 244
    },
    {
      "epoch": 0.7827476038338658,
      "grad_norm": 0.9203463968163862,
      "learning_rate": 9.23256100597246e-06,
      "loss": 0.5189,
      "step": 245
    },
    {
      "epoch": 0.7859424920127795,
      "grad_norm": 0.748700979427418,
      "learning_rate": 9.22263540052558e-06,
      "loss": 0.5151,
      "step": 246
    },
    {
      "epoch": 0.7891373801916933,
      "grad_norm": 0.7392344241335602,
      "learning_rate": 9.212651427804544e-06,
      "loss": 0.5593,
      "step": 247
    },
    {
      "epoch": 0.792332268370607,
      "grad_norm": 0.7348453831535354,
      "learning_rate": 9.202609225812572e-06,
      "loss": 0.5106,
      "step": 248
    },
    {
      "epoch": 0.7955271565495208,
      "grad_norm": 0.8370603755867384,
      "learning_rate": 9.192508933357753e-06,
      "loss": 0.4975,
      "step": 249
    },
    {
      "epoch": 0.7987220447284346,
      "grad_norm": 0.817703134992455,
      "learning_rate": 9.182350690051134e-06,
      "loss": 0.5089,
      "step": 250
    },
    {
      "epoch": 0.8019169329073482,
      "grad_norm": 0.7599324215664683,
      "learning_rate": 9.172134636304783e-06,
      "loss": 0.4621,
      "step": 251
    },
    {
      "epoch": 0.805111821086262,
      "grad_norm": 0.8083512869640488,
      "learning_rate": 9.16186091332985e-06,
      "loss": 0.5013,
      "step": 252
    },
    {
      "epoch": 0.8083067092651757,
      "grad_norm": 0.829560542023121,
      "learning_rate": 9.15152966313462e-06,
      "loss": 0.4925,
      "step": 253
    },
    {
      "epoch": 0.8115015974440895,
      "grad_norm": 0.7561288554172421,
      "learning_rate": 9.141141028522544e-06,
      "loss": 0.4612,
      "step": 254
    },
    {
      "epoch": 0.8146964856230032,
      "grad_norm": 0.7950323702810275,
      "learning_rate": 9.130695153090272e-06,
      "loss": 0.4837,
      "step": 255
    },
    {
      "epoch": 0.8178913738019169,
      "grad_norm": 0.8516278924258801,
      "learning_rate": 9.120192181225658e-06,
      "loss": 0.5045,
      "step": 256
    },
    {
      "epoch": 0.8210862619808307,
      "grad_norm": 0.8916603008544254,
      "learning_rate": 9.109632258105771e-06,
      "loss": 0.4874,
      "step": 257
    },
    {
      "epoch": 0.8242811501597445,
      "grad_norm": 0.7691147493078145,
      "learning_rate": 9.099015529694894e-06,
      "loss": 0.4771,
      "step": 258
    },
    {
      "epoch": 0.8274760383386581,
      "grad_norm": 0.869859122278694,
      "learning_rate": 9.088342142742493e-06,
      "loss": 0.4943,
      "step": 259
    },
    {
      "epoch": 0.8306709265175719,
      "grad_norm": 0.8183569250546232,
      "learning_rate": 9.077612244781196e-06,
      "loss": 0.5186,
      "step": 260
    },
    {
      "epoch": 0.8338658146964856,
      "grad_norm": 0.8121246614868811,
      "learning_rate": 9.066825984124751e-06,
      "loss": 0.5066,
      "step": 261
    },
    {
      "epoch": 0.8370607028753994,
      "grad_norm": 0.8809673133319126,
      "learning_rate": 9.055983509865988e-06,
      "loss": 0.5428,
      "step": 262
    },
    {
      "epoch": 0.8402555910543131,
      "grad_norm": 0.948125364179264,
      "learning_rate": 9.045084971874738e-06,
      "loss": 0.5276,
      "step": 263
    },
    {
      "epoch": 0.8434504792332268,
      "grad_norm": 0.8880641547871259,
      "learning_rate": 9.034130520795774e-06,
      "loss": 0.5562,
      "step": 264
    },
    {
      "epoch": 0.8466453674121406,
      "grad_norm": 0.732721292018942,
      "learning_rate": 9.023120308046726e-06,
      "loss": 0.504,
      "step": 265
    },
    {
      "epoch": 0.8498402555910544,
      "grad_norm": 0.9396860529462092,
      "learning_rate": 9.012054485815995e-06,
      "loss": 0.4649,
      "step": 266
    },
    {
      "epoch": 0.853035143769968,
      "grad_norm": 0.8242804566044789,
      "learning_rate": 9.00093320706063e-06,
      "loss": 0.4908,
      "step": 267
    },
    {
      "epoch": 0.8562300319488818,
      "grad_norm": 0.7136166681699279,
      "learning_rate": 8.989756625504237e-06,
      "loss": 0.483,
      "step": 268
    },
    {
      "epoch": 0.8594249201277955,
      "grad_norm": 0.9605400874780818,
      "learning_rate": 8.978524895634842e-06,
      "loss": 0.5018,
      "step": 269
    },
    {
      "epoch": 0.8626198083067093,
      "grad_norm": 0.9100046397923284,
      "learning_rate": 8.967238172702754e-06,
      "loss": 0.5439,
      "step": 270
    },
    {
      "epoch": 0.865814696485623,
      "grad_norm": 0.7997810149108854,
      "learning_rate": 8.95589661271842e-06,
      "loss": 0.5334,
      "step": 271
    },
    {
      "epoch": 0.8690095846645367,
      "grad_norm": 0.9547461697852979,
      "learning_rate": 8.94450037245028e-06,
      "loss": 0.4968,
      "step": 272
    },
    {
      "epoch": 0.8722044728434505,
      "grad_norm": 0.8023978860589094,
      "learning_rate": 8.933049609422582e-06,
      "loss": 0.4893,
      "step": 273
    },
    {
      "epoch": 0.8753993610223643,
      "grad_norm": 0.9014885751500846,
      "learning_rate": 8.921544481913218e-06,
      "loss": 0.5345,
      "step": 274
    },
    {
      "epoch": 0.8785942492012779,
      "grad_norm": 0.8131253814280156,
      "learning_rate": 8.909985148951528e-06,
      "loss": 0.5156,
      "step": 275
    },
    {
      "epoch": 0.8817891373801917,
      "grad_norm": 0.8324175224666767,
      "learning_rate": 8.898371770316113e-06,
      "loss": 0.529,
      "step": 276
    },
    {
      "epoch": 0.8849840255591054,
      "grad_norm": 0.8492286873279923,
      "learning_rate": 8.886704506532611e-06,
      "loss": 0.5067,
      "step": 277
    },
    {
      "epoch": 0.8881789137380192,
      "grad_norm": 0.9125893960779855,
      "learning_rate": 8.874983518871488e-06,
      "loss": 0.4631,
      "step": 278
    },
    {
      "epoch": 0.8913738019169329,
      "grad_norm": 0.8225205350268786,
      "learning_rate": 8.86320896934581e-06,
      "loss": 0.5109,
      "step": 279
    },
    {
      "epoch": 0.8945686900958466,
      "grad_norm": 0.9070535249722314,
      "learning_rate": 8.851381020709e-06,
      "loss": 0.5404,
      "step": 280
    },
    {
      "epoch": 0.8977635782747604,
      "grad_norm": 0.8719696885529683,
      "learning_rate": 8.839499836452584e-06,
      "loss": 0.544,
      "step": 281
    },
    {
      "epoch": 0.9009584664536742,
      "grad_norm": 0.8472227031001561,
      "learning_rate": 8.827565580803944e-06,
      "loss": 0.4744,
      "step": 282
    },
    {
      "epoch": 0.9041533546325878,
      "grad_norm": 0.7744023330970529,
      "learning_rate": 8.815578418724031e-06,
      "loss": 0.4917,
      "step": 283
    },
    {
      "epoch": 0.9073482428115016,
      "grad_norm": 0.6869536247022346,
      "learning_rate": 8.803538515905102e-06,
      "loss": 0.4877,
      "step": 284
    },
    {
      "epoch": 0.9105431309904153,
      "grad_norm": 0.89412421480912,
      "learning_rate": 8.791446038768416e-06,
      "loss": 0.5375,
      "step": 285
    },
    {
      "epoch": 0.9137380191693291,
      "grad_norm": 0.8772749555457497,
      "learning_rate": 8.779301154461945e-06,
      "loss": 0.5083,
      "step": 286
    },
    {
      "epoch": 0.9169329073482428,
      "grad_norm": 0.890143694882453,
      "learning_rate": 8.76710403085805e-06,
      "loss": 0.5355,
      "step": 287
    },
    {
      "epoch": 0.9201277955271565,
      "grad_norm": 0.8678519842166535,
      "learning_rate": 8.754854836551174e-06,
      "loss": 0.5028,
      "step": 288
    },
    {
      "epoch": 0.9233226837060703,
      "grad_norm": 0.7828124341079867,
      "learning_rate": 8.742553740855507e-06,
      "loss": 0.465,
      "step": 289
    },
    {
      "epoch": 0.9265175718849841,
      "grad_norm": 1.0678308340301852,
      "learning_rate": 8.730200913802638e-06,
      "loss": 0.5438,
      "step": 290
    },
    {
      "epoch": 0.9297124600638977,
      "grad_norm": 0.8351824750309134,
      "learning_rate": 8.717796526139218e-06,
      "loss": 0.4949,
      "step": 291
    },
    {
      "epoch": 0.9329073482428115,
      "grad_norm": 0.8499796924611565,
      "learning_rate": 8.70534074932459e-06,
      "loss": 0.5081,
      "step": 292
    },
    {
      "epoch": 0.9361022364217252,
      "grad_norm": 0.9212590356714732,
      "learning_rate": 8.692833755528426e-06,
      "loss": 0.5278,
      "step": 293
    },
    {
      "epoch": 0.939297124600639,
      "grad_norm": 0.8289607688137761,
      "learning_rate": 8.680275717628336e-06,
      "loss": 0.5033,
      "step": 294
    },
    {
      "epoch": 0.9424920127795527,
      "grad_norm": 0.9377628019643448,
      "learning_rate": 8.667666809207495e-06,
      "loss": 0.5002,
      "step": 295
    },
    {
      "epoch": 0.9456869009584664,
      "grad_norm": 0.8595609804694344,
      "learning_rate": 8.655007204552228e-06,
      "loss": 0.5699,
      "step": 296
    },
    {
      "epoch": 0.9488817891373802,
      "grad_norm": 0.9144332718871088,
      "learning_rate": 8.64229707864961e-06,
      "loss": 0.4943,
      "step": 297
    },
    {
      "epoch": 0.952076677316294,
      "grad_norm": 0.8709388293908786,
      "learning_rate": 8.629536607185042e-06,
      "loss": 0.5194,
      "step": 298
    },
    {
      "epoch": 0.9552715654952076,
      "grad_norm": 0.8594579405188586,
      "learning_rate": 8.616725966539831e-06,
      "loss": 0.5146,
      "step": 299
    },
    {
      "epoch": 0.9584664536741214,
      "grad_norm": 0.9937747625380199,
      "learning_rate": 8.60386533378874e-06,
      "loss": 0.5002,
      "step": 300
    },
    {
      "epoch": 0.9616613418530351,
      "grad_norm": 0.8918915409321642,
      "learning_rate": 8.590954886697554e-06,
      "loss": 0.4845,
      "step": 301
    },
    {
      "epoch": 0.9648562300319489,
      "grad_norm": 0.7843164146186518,
      "learning_rate": 8.577994803720605e-06,
      "loss": 0.4975,
      "step": 302
    },
    {
      "epoch": 0.9680511182108626,
      "grad_norm": 0.9015940400069826,
      "learning_rate": 8.564985263998327e-06,
      "loss": 0.4797,
      "step": 303
    },
    {
      "epoch": 0.9712460063897763,
      "grad_norm": 0.7029136656889042,
      "learning_rate": 8.551926447354759e-06,
      "loss": 0.4639,
      "step": 304
    },
    {
      "epoch": 0.9744408945686901,
      "grad_norm": 0.8144223926791186,
      "learning_rate": 8.538818534295076e-06,
      "loss": 0.4892,
      "step": 305
    },
    {
      "epoch": 0.9776357827476039,
      "grad_norm": 0.736223257739176,
      "learning_rate": 8.525661706003083e-06,
      "loss": 0.484,
      "step": 306
    },
    {
      "epoch": 0.9808306709265175,
      "grad_norm": 0.8004441127612412,
      "learning_rate": 8.512456144338717e-06,
      "loss": 0.4956,
      "step": 307
    },
    {
      "epoch": 0.9840255591054313,
      "grad_norm": 0.8092384053836325,
      "learning_rate": 8.499202031835532e-06,
      "loss": 0.4973,
      "step": 308
    },
    {
      "epoch": 0.987220447284345,
      "grad_norm": 0.9020628340911017,
      "learning_rate": 8.485899551698166e-06,
      "loss": 0.5086,
      "step": 309
    },
    {
      "epoch": 0.9904153354632588,
      "grad_norm": 0.7802779374482116,
      "learning_rate": 8.472548887799833e-06,
      "loss": 0.4838,
      "step": 310
    },
    {
      "epoch": 0.9936102236421726,
      "grad_norm": 0.6826024474896338,
      "learning_rate": 8.45915022467975e-06,
      "loss": 0.4707,
      "step": 311
    },
    {
      "epoch": 0.9968051118210862,
      "grad_norm": 0.9604893538879873,
      "learning_rate": 8.445703747540614e-06,
      "loss": 0.5721,
      "step": 312
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.8699409146438202,
      "learning_rate": 8.43220964224602e-06,
      "loss": 0.5683,
      "step": 313
    },
    {
      "epoch": 1.0031948881789137,
      "grad_norm": 0.7764317553928854,
      "learning_rate": 8.418668095317912e-06,
      "loss": 0.4403,
      "step": 314
    },
    {
      "epoch": 1.0063897763578276,
      "grad_norm": 0.7338758896539651,
      "learning_rate": 8.405079293933986e-06,
      "loss": 0.4496,
      "step": 315
    },
    {
      "epoch": 1.0095846645367412,
      "grad_norm": 0.7540355701636785,
      "learning_rate": 8.391443425925118e-06,
      "loss": 0.3808,
      "step": 316
    },
    {
      "epoch": 1.012779552715655,
      "grad_norm": 0.7946391602416338,
      "learning_rate": 8.37776067977276e-06,
      "loss": 0.3941,
      "step": 317
    },
    {
      "epoch": 1.0159744408945688,
      "grad_norm": 0.7751109185829435,
      "learning_rate": 8.36403124460633e-06,
      "loss": 0.41,
      "step": 318
    },
    {
      "epoch": 1.0191693290734825,
      "grad_norm": 0.699379354402552,
      "learning_rate": 8.350255310200611e-06,
      "loss": 0.4023,
      "step": 319
    },
    {
      "epoch": 1.0223642172523961,
      "grad_norm": 0.8578136972746896,
      "learning_rate": 8.336433066973122e-06,
      "loss": 0.4308,
      "step": 320
    },
    {
      "epoch": 1.0255591054313098,
      "grad_norm": 0.8333999234295557,
      "learning_rate": 8.322564705981476e-06,
      "loss": 0.4339,
      "step": 321
    },
    {
      "epoch": 1.0287539936102237,
      "grad_norm": 0.8682415637488364,
      "learning_rate": 8.308650418920751e-06,
      "loss": 0.4639,
      "step": 322
    },
    {
      "epoch": 1.0319488817891374,
      "grad_norm": 0.7010492104711966,
      "learning_rate": 8.294690398120843e-06,
      "loss": 0.4138,
      "step": 323
    },
    {
      "epoch": 1.035143769968051,
      "grad_norm": 0.8352684574898297,
      "learning_rate": 8.280684836543794e-06,
      "loss": 0.4181,
      "step": 324
    },
    {
      "epoch": 1.038338658146965,
      "grad_norm": 0.8062281397866784,
      "learning_rate": 8.266633927781135e-06,
      "loss": 0.3765,
      "step": 325
    },
    {
      "epoch": 1.0415335463258786,
      "grad_norm": 0.8011168130993689,
      "learning_rate": 8.25253786605121e-06,
      "loss": 0.4155,
      "step": 326
    },
    {
      "epoch": 1.0447284345047922,
      "grad_norm": 0.7929363913746529,
      "learning_rate": 8.238396846196483e-06,
      "loss": 0.4796,
      "step": 327
    },
    {
      "epoch": 1.0479233226837061,
      "grad_norm": 0.681042440631801,
      "learning_rate": 8.224211063680854e-06,
      "loss": 0.3758,
      "step": 328
    },
    {
      "epoch": 1.0511182108626198,
      "grad_norm": 0.8718135408032824,
      "learning_rate": 8.209980714586955e-06,
      "loss": 0.4103,
      "step": 329
    },
    {
      "epoch": 1.0543130990415335,
      "grad_norm": 0.8413791063039867,
      "learning_rate": 8.195705995613436e-06,
      "loss": 0.4299,
      "step": 330
    },
    {
      "epoch": 1.0575079872204474,
      "grad_norm": 0.8465944020037799,
      "learning_rate": 8.181387104072252e-06,
      "loss": 0.4611,
      "step": 331
    },
    {
      "epoch": 1.060702875399361,
      "grad_norm": 0.768940507092269,
      "learning_rate": 8.167024237885927e-06,
      "loss": 0.4186,
      "step": 332
    },
    {
      "epoch": 1.0638977635782747,
      "grad_norm": 0.7522783224445742,
      "learning_rate": 8.152617595584827e-06,
      "loss": 0.4406,
      "step": 333
    },
    {
      "epoch": 1.0670926517571886,
      "grad_norm": 0.768895921716199,
      "learning_rate": 8.138167376304411e-06,
      "loss": 0.383,
      "step": 334
    },
    {
      "epoch": 1.0702875399361023,
      "grad_norm": 0.7638818070910931,
      "learning_rate": 8.123673779782481e-06,
      "loss": 0.4079,
      "step": 335
    },
    {
      "epoch": 1.073482428115016,
      "grad_norm": 0.6697970201976026,
      "learning_rate": 8.10913700635642e-06,
      "loss": 0.3388,
      "step": 336
    },
    {
      "epoch": 1.0766773162939298,
      "grad_norm": 0.6812118706436625,
      "learning_rate": 8.094557256960419e-06,
      "loss": 0.4072,
      "step": 337
    },
    {
      "epoch": 1.0798722044728435,
      "grad_norm": 0.8770247682643849,
      "learning_rate": 8.079934733122708e-06,
      "loss": 0.4094,
      "step": 338
    },
    {
      "epoch": 1.0830670926517572,
      "grad_norm": 0.6616362830243507,
      "learning_rate": 8.065269636962765e-06,
      "loss": 0.444,
      "step": 339
    },
    {
      "epoch": 1.0862619808306708,
      "grad_norm": 0.7111069767944058,
      "learning_rate": 8.05056217118852e-06,
      "loss": 0.4554,
      "step": 340
    },
    {
      "epoch": 1.0894568690095847,
      "grad_norm": 0.7694717252053866,
      "learning_rate": 8.035812539093557e-06,
      "loss": 0.4552,
      "step": 341
    },
    {
      "epoch": 1.0926517571884984,
| "grad_norm": 0.7393731584116311, | |
| "learning_rate": 8.021020944554305e-06, | |
| "loss": 0.4017, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 1.095846645367412, | |
| "grad_norm": 0.6752488842069413, | |
| "learning_rate": 8.006187592027215e-06, | |
| "loss": 0.4548, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 1.099041533546326, | |
| "grad_norm": 0.6246938261456642, | |
| "learning_rate": 7.991312686545939e-06, | |
| "loss": 0.4045, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 1.1022364217252396, | |
| "grad_norm": 0.7350229600234459, | |
| "learning_rate": 7.976396433718492e-06, | |
| "loss": 0.4012, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 1.1054313099041533, | |
| "grad_norm": 0.7695718734465381, | |
| "learning_rate": 7.961439039724413e-06, | |
| "loss": 0.4361, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 1.1086261980830672, | |
| "grad_norm": 0.7248682429154152, | |
| "learning_rate": 7.946440711311913e-06, | |
| "loss": 0.371, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 1.1118210862619808, | |
| "grad_norm": 0.7243053070616239, | |
| "learning_rate": 7.931401655795021e-06, | |
| "loss": 0.4156, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 1.1150159744408945, | |
| "grad_norm": 0.7358516001684667, | |
| "learning_rate": 7.916322081050708e-06, | |
| "loss": 0.4294, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 1.1182108626198084, | |
| "grad_norm": 0.6989330209550405, | |
| "learning_rate": 7.90120219551603e-06, | |
| "loss": 0.3772, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 1.121405750798722, | |
| "grad_norm": 0.6979994560001975, | |
| "learning_rate": 7.88604220818523e-06, | |
| "loss": 0.4097, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 1.1246006389776357, | |
| "grad_norm": 0.7376492259097831, | |
| "learning_rate": 7.870842328606863e-06, | |
| "loss": 0.4062, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 1.1277955271565494, | |
| "grad_norm": 0.8182890240546004, | |
| "learning_rate": 7.85560276688089e-06, | |
| "loss": 0.4262, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 1.1309904153354633, | |
| "grad_norm": 0.5929082187409976, | |
| "learning_rate": 7.84032373365578e-06, | |
| "loss": 0.3847, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 1.134185303514377, | |
| "grad_norm": 0.5999973735326927, | |
| "learning_rate": 7.825005440125595e-06, | |
| "loss": 0.4121, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 1.1373801916932909, | |
| "grad_norm": 0.7226429475619828, | |
| "learning_rate": 7.809648098027067e-06, | |
| "loss": 0.3919, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 1.1405750798722045, | |
| "grad_norm": 0.6816845849611944, | |
| "learning_rate": 7.794251919636687e-06, | |
| "loss": 0.4472, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 1.1437699680511182, | |
| "grad_norm": 0.7573980172847606, | |
| "learning_rate": 7.778817117767748e-06, | |
| "loss": 0.4232, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 1.1469648562300319, | |
| "grad_norm": 0.7178781383342445, | |
| "learning_rate": 7.76334390576742e-06, | |
| "loss": 0.3673, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 1.1501597444089458, | |
| "grad_norm": 0.7335817902399818, | |
| "learning_rate": 7.747832497513797e-06, | |
| "loss": 0.4067, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 1.1533546325878594, | |
| "grad_norm": 0.7133472371686488, | |
| "learning_rate": 7.732283107412938e-06, | |
| "loss": 0.4314, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 1.156549520766773, | |
| "grad_norm": 0.7986130467269262, | |
| "learning_rate": 7.71669595039591e-06, | |
| "loss": 0.4333, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 1.159744408945687, | |
| "grad_norm": 0.7068834905394128, | |
| "learning_rate": 7.701071241915804e-06, | |
| "loss": 0.4318, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 1.1629392971246006, | |
| "grad_norm": 0.737545144664687, | |
| "learning_rate": 7.685409197944768e-06, | |
| "loss": 0.412, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 1.1661341853035143, | |
| "grad_norm": 0.764234149996104, | |
| "learning_rate": 7.669710034971025e-06, | |
| "loss": 0.4172, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 1.1693290734824282, | |
| "grad_norm": 0.7134888087174184, | |
| "learning_rate": 7.653973969995866e-06, | |
| "loss": 0.4085, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 1.1725239616613419, | |
| "grad_norm": 0.675285473832815, | |
| "learning_rate": 7.638201220530664e-06, | |
| "loss": 0.4266, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 1.1757188498402555, | |
| "grad_norm": 0.8554365485718853, | |
| "learning_rate": 7.622392004593862e-06, | |
| "loss": 0.4018, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 1.1789137380191694, | |
| "grad_norm": 0.7394321475600621, | |
| "learning_rate": 7.60654654070796e-06, | |
| "loss": 0.3781, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 1.182108626198083, | |
| "grad_norm": 0.7522087705245671, | |
| "learning_rate": 7.59066504789649e-06, | |
| "loss": 0.4215, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 1.1853035143769968, | |
| "grad_norm": 0.7859238242025294, | |
| "learning_rate": 7.574747745681e-06, | |
| "loss": 0.4481, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 1.1884984025559104, | |
| "grad_norm": 0.8893594410231564, | |
| "learning_rate": 7.558794854078006e-06, | |
| "loss": 0.3957, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 1.1916932907348243, | |
| "grad_norm": 0.7416322294292199, | |
| "learning_rate": 7.542806593595963e-06, | |
| "loss": 0.4307, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 1.194888178913738, | |
| "grad_norm": 0.8682014698204892, | |
| "learning_rate": 7.526783185232208e-06, | |
| "loss": 0.4225, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 1.1980830670926517, | |
| "grad_norm": 0.808491779347055, | |
| "learning_rate": 7.51072485046991e-06, | |
| "loss": 0.4084, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 1.2012779552715656, | |
| "grad_norm": 0.8685719336829648, | |
| "learning_rate": 7.494631811275008e-06, | |
| "loss": 0.3973, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 1.2044728434504792, | |
| "grad_norm": 0.7901114669485614, | |
| "learning_rate": 7.478504290093138e-06, | |
| "loss": 0.4257, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 1.207667731629393, | |
| "grad_norm": 0.8160312545633633, | |
| "learning_rate": 7.462342509846571e-06, | |
| "loss": 0.4048, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 1.2108626198083068, | |
| "grad_norm": 0.7233374312117897, | |
| "learning_rate": 7.446146693931111e-06, | |
| "loss": 0.3952, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 1.2140575079872205, | |
| "grad_norm": 0.7767917244942978, | |
| "learning_rate": 7.42991706621303e-06, | |
| "loss": 0.4891, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 1.2172523961661341, | |
| "grad_norm": 0.7072787009379669, | |
| "learning_rate": 7.413653851025959e-06, | |
| "loss": 0.4741, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 1.220447284345048, | |
| "grad_norm": 0.746065313367986, | |
| "learning_rate": 7.397357273167789e-06, | |
| "loss": 0.4698, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 1.2236421725239617, | |
| "grad_norm": 0.7234203890871863, | |
| "learning_rate": 7.381027557897568e-06, | |
| "loss": 0.3802, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 1.2268370607028753, | |
| "grad_norm": 0.7599175690114612, | |
| "learning_rate": 7.364664930932385e-06, | |
| "loss": 0.439, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 1.230031948881789, | |
| "grad_norm": 0.8277285657761506, | |
| "learning_rate": 7.348269618444248e-06, | |
| "loss": 0.4635, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 1.233226837060703, | |
| "grad_norm": 0.7151589242001917, | |
| "learning_rate": 7.331841847056962e-06, | |
| "loss": 0.434, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 1.2364217252396166, | |
| "grad_norm": 0.7885405443701896, | |
| "learning_rate": 7.315381843842995e-06, | |
| "loss": 0.3901, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 1.2396166134185305, | |
| "grad_norm": 0.7794048735012029, | |
| "learning_rate": 7.298889836320334e-06, | |
| "loss": 0.4496, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 1.2428115015974441, | |
| "grad_norm": 0.7185850468731043, | |
| "learning_rate": 7.282366052449351e-06, | |
| "loss": 0.4156, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 1.2460063897763578, | |
| "grad_norm": 0.8761942292460905, | |
| "learning_rate": 7.265810720629643e-06, | |
| "loss": 0.4381, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 1.2492012779552715, | |
| "grad_norm": 0.7418876903956504, | |
| "learning_rate": 7.249224069696876e-06, | |
| "loss": 0.4231, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 1.2523961661341854, | |
| "grad_norm": 0.6689548591926106, | |
| "learning_rate": 7.232606328919627e-06, | |
| "loss": 0.4638, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 1.255591054313099, | |
| "grad_norm": 0.8018636198628307, | |
| "learning_rate": 7.215957727996208e-06, | |
| "loss": 0.3911, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 1.2587859424920127, | |
| "grad_norm": 0.8634573351751972, | |
| "learning_rate": 7.199278497051498e-06, | |
| "loss": 0.4627, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 1.2619808306709266, | |
| "grad_norm": 0.7022128622434374, | |
| "learning_rate": 7.182568866633757e-06, | |
| "loss": 0.3625, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 1.2651757188498403, | |
| "grad_norm": 0.7295361331459049, | |
| "learning_rate": 7.16582906771144e-06, | |
| "loss": 0.4223, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 1.268370607028754, | |
| "grad_norm": 0.8351272934479426, | |
| "learning_rate": 7.149059331670009e-06, | |
| "loss": 0.3954, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 1.2715654952076676, | |
| "grad_norm": 0.7773394015956511, | |
| "learning_rate": 7.132259890308726e-06, | |
| "loss": 0.4082, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 1.2747603833865815, | |
| "grad_norm": 0.854650362078553, | |
| "learning_rate": 7.115430975837457e-06, | |
| "loss": 0.4168, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 1.2779552715654952, | |
| "grad_norm": 0.7417248588065127, | |
| "learning_rate": 7.098572820873461e-06, | |
| "loss": 0.4451, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 1.281150159744409, | |
| "grad_norm": 0.7063837427460873, | |
| "learning_rate": 7.081685658438173e-06, | |
| "loss": 0.4155, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 1.2843450479233227, | |
| "grad_norm": 0.842141676152669, | |
| "learning_rate": 7.064769721953975e-06, | |
| "loss": 0.4502, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 1.2875399361022364, | |
| "grad_norm": 0.7471309894180018, | |
| "learning_rate": 7.047825245240989e-06, | |
| "loss": 0.4486, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 1.29073482428115, | |
| "grad_norm": 0.6602847047558186, | |
| "learning_rate": 7.030852462513827e-06, | |
| "loss": 0.4279, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 1.293929712460064, | |
| "grad_norm": 0.7483995023271691, | |
| "learning_rate": 7.013851608378359e-06, | |
| "loss": 0.3944, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 1.2971246006389776, | |
| "grad_norm": 0.8281579239977933, | |
| "learning_rate": 6.9968229178284775e-06, | |
| "loss": 0.4518, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 1.3003194888178915, | |
| "grad_norm": 0.7892389369737978, | |
| "learning_rate": 6.979766626242839e-06, | |
| "loss": 0.407, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 1.3035143769968052, | |
| "grad_norm": 0.7373915710031449, | |
| "learning_rate": 6.9626829693816135e-06, | |
| "loss": 0.413, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 1.3067092651757188, | |
| "grad_norm": 0.697910278003432, | |
| "learning_rate": 6.945572183383229e-06, | |
| "loss": 0.4234, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 1.3099041533546325, | |
| "grad_norm": 0.6940586739732686, | |
| "learning_rate": 6.928434504761106e-06, | |
| "loss": 0.3888, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 1.3130990415335464, | |
| "grad_norm": 0.7928807853026925, | |
| "learning_rate": 6.911270170400385e-06, | |
| "loss": 0.4165, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 1.31629392971246, | |
| "grad_norm": 0.7510163375476263, | |
| "learning_rate": 6.894079417554657e-06, | |
| "loss": 0.419, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 1.3194888178913737, | |
| "grad_norm": 0.7598233883481605, | |
| "learning_rate": 6.8768624838426815e-06, | |
| "loss": 0.3902, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 1.3226837060702876, | |
| "grad_norm": 0.6512401203280582, | |
| "learning_rate": 6.859619607245102e-06, | |
| "loss": 0.4362, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 1.3258785942492013, | |
| "grad_norm": 0.8386440871131998, | |
| "learning_rate": 6.842351026101155e-06, | |
| "loss": 0.4268, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 1.329073482428115, | |
| "grad_norm": 0.7273839937091774, | |
| "learning_rate": 6.825056979105382e-06, | |
| "loss": 0.4242, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 1.3322683706070286, | |
| "grad_norm": 0.7967716558629548, | |
| "learning_rate": 6.807737705304324e-06, | |
| "loss": 0.4333, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 1.3354632587859425, | |
| "grad_norm": 0.7164636960417264, | |
| "learning_rate": 6.790393444093214e-06, | |
| "loss": 0.4354, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 1.3386581469648562, | |
| "grad_norm": 0.7667891952582243, | |
| "learning_rate": 6.773024435212678e-06, | |
| "loss": 0.4516, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 1.34185303514377, | |
| "grad_norm": 0.6392120988071214, | |
| "learning_rate": 6.7556309187454185e-06, | |
| "loss": 0.4316, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 1.3450479233226837, | |
| "grad_norm": 0.8002715396359115, | |
| "learning_rate": 6.738213135112884e-06, | |
| "loss": 0.4272, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 1.3482428115015974, | |
| "grad_norm": 0.7554333578315849, | |
| "learning_rate": 6.720771325071965e-06, | |
| "loss": 0.4086, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 1.351437699680511, | |
| "grad_norm": 0.8764552858840713, | |
| "learning_rate": 6.703305729711653e-06, | |
| "loss": 0.4174, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 1.354632587859425, | |
| "grad_norm": 0.7445057916080219, | |
| "learning_rate": 6.685816590449708e-06, | |
| "loss": 0.4516, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 1.3578274760383386, | |
| "grad_norm": 0.7848197318433568, | |
| "learning_rate": 6.668304149029331e-06, | |
| "loss": 0.3928, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 1.3610223642172525, | |
| "grad_norm": 0.7472891259669744, | |
| "learning_rate": 6.650768647515813e-06, | |
| "loss": 0.4303, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 1.3642172523961662, | |
| "grad_norm": 0.8185996282961204, | |
| "learning_rate": 6.63321032829319e-06, | |
| "loss": 0.4069, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 1.3674121405750799, | |
| "grad_norm": 0.669092369955815, | |
| "learning_rate": 6.615629434060903e-06, | |
| "loss": 0.4329, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 1.3706070287539935, | |
| "grad_norm": 0.7849739125407705, | |
| "learning_rate": 6.598026207830428e-06, | |
| "loss": 0.4084, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 1.3738019169329074, | |
| "grad_norm": 0.7162838173137105, | |
| "learning_rate": 6.5804008929219284e-06, | |
| "loss": 0.3923, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 1.376996805111821, | |
| "grad_norm": 0.66931493645132, | |
| "learning_rate": 6.562753732960887e-06, | |
| "loss": 0.4008, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 1.3801916932907348, | |
| "grad_norm": 0.7546682360951521, | |
| "learning_rate": 6.545084971874738e-06, | |
| "loss": 0.3741, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 1.3833865814696487, | |
| "grad_norm": 0.6783918756770292, | |
| "learning_rate": 6.527394853889499e-06, | |
| "loss": 0.4063, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 1.3865814696485623, | |
| "grad_norm": 0.7141201132534667, | |
| "learning_rate": 6.5096836235263904e-06, | |
| "loss": 0.43, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 1.389776357827476, | |
| "grad_norm": 0.7605991726766619, | |
| "learning_rate": 6.491951525598461e-06, | |
| "loss": 0.4376, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 1.3929712460063897, | |
| "grad_norm": 0.7319141260491483, | |
| "learning_rate": 6.4741988052071965e-06, | |
| "loss": 0.4082, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 1.3961661341853036, | |
| "grad_norm": 0.7643694519158192, | |
| "learning_rate": 6.45642570773914e-06, | |
| "loss": 0.4081, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 1.3993610223642172, | |
| "grad_norm": 0.722337242212863, | |
| "learning_rate": 6.438632478862495e-06, | |
| "loss": 0.3884, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 1.4025559105431311, | |
| "grad_norm": 0.7849520980727834, | |
| "learning_rate": 6.4208193645237314e-06, | |
| "loss": 0.4201, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 1.4057507987220448, | |
| "grad_norm": 0.7276635927449195, | |
| "learning_rate": 6.402986610944183e-06, | |
| "loss": 0.4186, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 1.4089456869009584, | |
| "grad_norm": 0.6751798380993322, | |
| "learning_rate": 6.385134464616649e-06, | |
| "loss": 0.4304, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 1.4121405750798721, | |
| "grad_norm": 0.7202423850482267, | |
| "learning_rate": 6.367263172301985e-06, | |
| "loss": 0.3789, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 1.415335463258786, | |
| "grad_norm": 0.6077196826039813, | |
| "learning_rate": 6.3493729810256895e-06, | |
| "loss": 0.4144, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 1.4185303514376997, | |
| "grad_norm": 0.738843433648205, | |
| "learning_rate": 6.331464138074493e-06, | |
| "loss": 0.4115, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 1.4217252396166133, | |
| "grad_norm": 0.7241332647195087, | |
| "learning_rate": 6.313536890992935e-06, | |
| "loss": 0.3836, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 1.4249201277955272, | |
| "grad_norm": 0.8509078562677063, | |
| "learning_rate": 6.29559148757995e-06, | |
| "loss": 0.4541, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 1.428115015974441, | |
| "grad_norm": 0.7499421473333086, | |
| "learning_rate": 6.277628175885437e-06, | |
| "loss": 0.4169, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 1.4313099041533546, | |
| "grad_norm": 0.7763796937501032, | |
| "learning_rate": 6.2596472042068275e-06, | |
| "loss": 0.4611, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 1.4345047923322682, | |
| "grad_norm": 0.7284959387032885, | |
| "learning_rate": 6.241648821085666e-06, | |
| "loss": 0.4042, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 1.4376996805111821, | |
| "grad_norm": 0.7181894634050568, | |
| "learning_rate": 6.223633275304157e-06, | |
| "loss": 0.4181, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 1.4408945686900958, | |
| "grad_norm": 0.847329054523862, | |
| "learning_rate": 6.205600815881741e-06, | |
| "loss": 0.4005, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 1.4440894568690097, | |
| "grad_norm": 0.7232793981095568, | |
| "learning_rate": 6.187551692071648e-06, | |
| "loss": 0.3917, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 1.4472843450479234, | |
| "grad_norm": 0.7226887302856251, | |
| "learning_rate": 6.1694861533574445e-06, | |
| "loss": 0.3711, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 1.450479233226837, | |
| "grad_norm": 0.73844581553021, | |
| "learning_rate": 6.1514044494496e-06, | |
| "loss": 0.3914, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 1.4536741214057507, | |
| "grad_norm": 0.7328173367904206, | |
| "learning_rate": 6.133306830282021e-06, | |
| "loss": 0.4771, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 1.4568690095846646, | |
| "grad_norm": 0.739099444257498, | |
| "learning_rate": 6.115193546008602e-06, | |
| "loss": 0.4165, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 1.4600638977635783, | |
| "grad_norm": 0.7949971330404686, | |
| "learning_rate": 6.097064846999774e-06, | |
| "loss": 0.4246, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 1.4632587859424921, | |
| "grad_norm": 0.6791102624208766, | |
| "learning_rate": 6.078920983839032e-06, | |
| "loss": 0.4192, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 1.4664536741214058, | |
| "grad_norm": 0.8073479673710802, | |
| "learning_rate": 6.060762207319479e-06, | |
| "loss": 0.4323, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 1.4696485623003195, | |
| "grad_norm": 0.6804863979605771, | |
| "learning_rate": 6.042588768440358e-06, | |
| "loss": 0.4133, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 1.4728434504792332, | |
| "grad_norm": 0.7145970218309506, | |
| "learning_rate": 6.024400918403581e-06, | |
| "loss": 0.3918, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 1.476038338658147, | |
| "grad_norm": 0.6698235274017075, | |
| "learning_rate": 6.006198908610261e-06, | |
| "loss": 0.373, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 1.4792332268370607, | |
| "grad_norm": 0.7733574722542599, | |
| "learning_rate": 5.987982990657229e-06, | |
| "loss": 0.437, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 1.4824281150159744, | |
| "grad_norm": 0.6873053845458954, | |
| "learning_rate": 5.9697534163335645e-06, | |
| "loss": 0.4473, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 1.4856230031948883, | |
| "grad_norm": 0.7360613158414427, | |
| "learning_rate": 5.95151043761711e-06, | |
| "loss": 0.4101, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 1.488817891373802, | |
| "grad_norm": 0.7023303887076119, | |
| "learning_rate": 5.933254306670995e-06, | |
| "loss": 0.4193, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 1.4920127795527156, | |
| "grad_norm": 0.6097269330396781, | |
| "learning_rate": 5.914985275840135e-06, | |
| "loss": 0.423, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 1.4952076677316293, | |
| "grad_norm": 0.7537445735676624, | |
| "learning_rate": 5.896703597647765e-06, | |
| "loss": 0.3946, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 1.4984025559105432, | |
| "grad_norm": 0.6616967609184584, | |
| "learning_rate": 5.878409524791931e-06, | |
| "loss": 0.4284, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 1.5015974440894568, | |
| "grad_norm": 0.752495801790809, | |
| "learning_rate": 5.8601033101420055e-06, | |
| "loss": 0.4088, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 1.5047923322683707, | |
| "grad_norm": 0.7741168142210769, | |
| "learning_rate": 5.841785206735192e-06, | |
| "loss": 0.3907, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 1.5079872204472844, | |
| "grad_norm": 0.6406185492300456, | |
| "learning_rate": 5.823455467773027e-06, | |
| "loss": 0.3849, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 1.511182108626198, | |
| "grad_norm": 0.6778501202960715, | |
| "learning_rate": 5.805114346617874e-06, | |
| "loss": 0.4269, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 1.5143769968051117, | |
| "grad_norm": 0.7392061873417373, | |
| "learning_rate": 5.786762096789431e-06, | |
| "loss": 0.4127, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 1.5175718849840254, | |
| "grad_norm": 0.7388450985889835, | |
| "learning_rate": 5.768398971961221e-06, | |
| "loss": 0.358, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 1.5207667731629393, | |
| "grad_norm": 0.655106266677778, | |
| "learning_rate": 5.750025225957086e-06, | |
| "loss": 0.4013, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 1.5239616613418532, | |
| "grad_norm": 0.6419997806479123, | |
| "learning_rate": 5.731641112747679e-06, | |
| "loss": 0.4178, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 1.5271565495207668, | |
| "grad_norm": 0.5916675647122845, | |
| "learning_rate": 5.713246886446954e-06, | |
| "loss": 0.4074, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 1.5303514376996805, | |
| "grad_norm": 0.7409131229492478, | |
| "learning_rate": 5.694842801308651e-06, | |
| "loss": 0.4265, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 1.5335463258785942, | |
| "grad_norm": 0.6924800181246356, | |
| "learning_rate": 5.676429111722786e-06, | |
| "loss": 0.4319, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 1.5367412140575079, | |
| "grad_norm": 0.6384862154935599, | |
| "learning_rate": 5.6580060722121325e-06, | |
| "loss": 0.4451, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 1.5399361022364217, | |
| "grad_norm": 0.6020575738289815, | |
| "learning_rate": 5.639573937428699e-06, | |
| "loss": 0.4583, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 1.5431309904153354, | |
| "grad_norm": 0.7242056238178548, | |
| "learning_rate": 5.621132962150216e-06, | |
| "loss": 0.378, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 1.5463258785942493, | |
| "grad_norm": 0.7557911284079408, | |
| "learning_rate": 5.6026834012766155e-06, | |
| "loss": 0.4267, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 1.549520766773163, | |
| "grad_norm": 0.7095401782411899, | |
| "learning_rate": 5.584225509826497e-06, | |
| "loss": 0.3909, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 1.5527156549520766, | |
| "grad_norm": 0.7881660312713781, | |
| "learning_rate": 5.565759542933612e-06, | |
| "loss": 0.4146, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 1.5559105431309903, | |
| "grad_norm": 0.6862528849699476, | |
| "learning_rate": 5.547285755843334e-06, | |
| "loss": 0.4191, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 1.5591054313099042, | |
| "grad_norm": 0.6513704867306412, | |
| "learning_rate": 5.5288044039091335e-06, | |
| "loss": 0.4014, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 1.5623003194888179, | |
| "grad_norm": 0.6820165967193221, | |
| "learning_rate": 5.510315742589042e-06, | |
| "loss": 0.4625, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 1.5654952076677318, | |
| "grad_norm": 0.6813253610979702, | |
| "learning_rate": 5.491820027442126e-06, | |
| "loss": 0.3698, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 1.5686900958466454, | |
| "grad_norm": 0.6207775366733131, | |
| "learning_rate": 5.473317514124958e-06, | |
| "loss": 0.4226, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 1.571884984025559, | |
| "grad_norm": 0.7219582547757427, | |
| "learning_rate": 5.454808458388069e-06, | |
| "loss": 0.3577, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 1.5750798722044728, | |
| "grad_norm": 0.587531289408549, | |
| "learning_rate": 5.436293116072431e-06, | |
| "loss": 0.4408, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 1.5782747603833864, | |
| "grad_norm": 0.5936564578931848, | |
| "learning_rate": 5.417771743105908e-06, | |
| "loss": 0.3885, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 1.5814696485623003, | |
| "grad_norm": 0.6256323503523564, | |
| "learning_rate": 5.399244595499721e-06, | |
| "loss": 0.3866, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 1.5846645367412142, | |
| "grad_norm": 0.6985126034785164, | |
| "learning_rate": 5.380711929344915e-06, | |
| "loss": 0.4288, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 1.5878594249201279, | |
| "grad_norm": 0.6558224942187812, | |
| "learning_rate": 5.362174000808813e-06, | |
| "loss": 0.4128, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 1.5910543130990416, | |
| "grad_norm": 0.7059041917135856, | |
| "learning_rate": 5.343631066131476e-06, | |
| "loss": 0.4302, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 1.5942492012779552, | |
| "grad_norm": 0.6947594122866555, | |
| "learning_rate": 5.325083381622165e-06, | |
| "loss": 0.401, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 1.5974440894568689, | |
| "grad_norm": 0.6832288077833533, | |
| "learning_rate": 5.30653120365579e-06, | |
| "loss": 0.4533, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 1.6006389776357828, | |
| "grad_norm": 0.762224714736928, | |
| "learning_rate": 5.28797478866938e-06, | |
| "loss": 0.4358, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 1.6038338658146964, | |
| "grad_norm": 0.8092440212010764, | |
| "learning_rate": 5.269414393158523e-06, | |
| "loss": 0.3818, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 1.6070287539936103, | |
| "grad_norm": 0.6376129418611447, | |
| "learning_rate": 5.250850273673831e-06, | |
| "loss": 0.3758, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 1.610223642172524, | |
| "grad_norm": 0.6767891876418427, | |
| "learning_rate": 5.232282686817392e-06, | |
| "loss": 0.4082, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 1.6134185303514377, | |
| "grad_norm": 0.6922410705874523, | |
| "learning_rate": 5.213711889239214e-06, | |
| "loss": 0.4136, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 1.6166134185303513, | |
| "grad_norm": 0.7054447503164165, | |
| "learning_rate": 5.195138137633695e-06, | |
| "loss": 0.4522, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 1.619808306709265, | |
| "grad_norm": 0.6668419928190755, | |
| "learning_rate": 5.17656168873606e-06, | |
| "loss": 0.3966, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 1.623003194888179, | |
| "grad_norm": 0.725965975535835, | |
| "learning_rate": 5.157982799318817e-06, | |
| "loss": 0.3691, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 1.6261980830670928, | |
| "grad_norm": 0.7243890485488652, | |
| "learning_rate": 5.139401726188208e-06, | |
| "loss": 0.3885, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 1.6293929712460065, | |
| "grad_norm": 0.6514265757359697, | |
| "learning_rate": 5.120818726180662e-06, | |
| "loss": 0.3729, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 1.6325878594249201, | |
| "grad_norm": 0.7542793277756046, | |
| "learning_rate": 5.1022340561592396e-06, | |
| "loss": 0.4298, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 1.6357827476038338, | |
| "grad_norm": 0.7727874769900948, | |
| "learning_rate": 5.083647973010085e-06, | |
| "loss": 0.4047, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 1.6389776357827475, | |
| "grad_norm": 0.77685653742761, | |
| "learning_rate": 5.065060733638878e-06, | |
| "loss": 0.4556, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 1.6421725239616614, | |
| "grad_norm": 0.727081945835971, | |
| "learning_rate": 5.046472594967279e-06, | |
| "loss": 0.3998, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 1.645367412140575, | |
| "grad_norm": 0.6201784166139842, | |
| "learning_rate": 5.027883813929374e-06, | |
| "loss": 0.3983, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 1.648562300319489, | |
| "grad_norm": 0.6293419628548934, | |
| "learning_rate": 5.009294647468137e-06, | |
| "loss": 0.4013, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 1.6517571884984026, | |
| "grad_norm": 0.6657061153428472, | |
| "learning_rate": 4.990705352531864e-06, | |
| "loss": 0.3764, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 1.6549520766773163, | |
| "grad_norm": 0.7353093271163771, | |
| "learning_rate": 4.972116186070626e-06, | |
| "loss": 0.4076, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 1.65814696485623, | |
| "grad_norm": 0.6983863986741474, | |
| "learning_rate": 4.953527405032723e-06, | |
| "loss": 0.3882, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 1.6613418530351438, | |
| "grad_norm": 0.7717442178352649, | |
| "learning_rate": 4.934939266361123e-06, | |
| "loss": 0.412, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 1.6645367412140575, | |
| "grad_norm": 0.7813215600588967, | |
| "learning_rate": 4.916352026989914e-06, | |
| "loss": 0.4301, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 1.6677316293929714, | |
| "grad_norm": 0.6636520728805212, | |
| "learning_rate": 4.897765943840761e-06, | |
| "loss": 0.3977, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 1.670926517571885, | |
| "grad_norm": 0.7979382389574217, | |
| "learning_rate": 4.87918127381934e-06, | |
| "loss": 0.4131, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 1.6741214057507987, | |
| "grad_norm": 0.7142829720788711, | |
| "learning_rate": 4.860598273811793e-06, | |
| "loss": 0.4427, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 1.6773162939297124, | |
| "grad_norm": 0.719461762977339, | |
| "learning_rate": 4.842017200681185e-06, | |
| "loss": 0.3764, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 1.680511182108626, | |
| "grad_norm": 0.7030911473478874, | |
| "learning_rate": 4.823438311263943e-06, | |
| "loss": 0.4515, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 1.68370607028754, | |
| "grad_norm": 0.6773019245318261, | |
| "learning_rate": 4.804861862366306e-06, | |
| "loss": 0.3632, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 1.6869009584664538, | |
| "grad_norm": 0.6222261749245824, | |
| "learning_rate": 4.786288110760787e-06, | |
| "loss": 0.4396, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 1.6900958466453675, | |
| "grad_norm": 0.6310219300708585, | |
| "learning_rate": 4.767717313182611e-06, | |
| "loss": 0.4233, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 1.6932907348242812, | |
| "grad_norm": 0.6345900048229579, | |
| "learning_rate": 4.74914972632617e-06, | |
| "loss": 0.4146, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 1.6964856230031948, | |
| "grad_norm": 0.8515608156634364, | |
| "learning_rate": 4.730585606841479e-06, | |
| "loss": 0.4206, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 1.6996805111821085, | |
| "grad_norm": 0.7405010020462823, | |
| "learning_rate": 4.7120252113306216e-06, | |
| "loss": 0.4514, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 1.7028753993610224, | |
| "grad_norm": 0.7009601501256475, | |
| "learning_rate": 4.693468796344211e-06, | |
| "loss": 0.4437, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 1.706070287539936, | |
| "grad_norm": 0.7896649309522711, | |
| "learning_rate": 4.6749166183778375e-06, | |
| "loss": 0.4411, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 1.70926517571885, | |
| "grad_norm": 0.66481894530652, | |
| "learning_rate": 4.656368933868525e-06, | |
| "loss": 0.4003, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 1.7124600638977636, | |
| "grad_norm": 0.6803181892467599, | |
| "learning_rate": 4.637825999191189e-06, | |
| "loss": 0.4078, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 1.7156549520766773, | |
| "grad_norm": 0.7907155834251054, | |
| "learning_rate": 4.619288070655086e-06, | |
| "loss": 0.3947, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 1.718849840255591, | |
| "grad_norm": 0.6202721344766233, | |
| "learning_rate": 4.600755404500281e-06, | |
| "loss": 0.384, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 1.7220447284345048, | |
| "grad_norm": 0.6323756992860589, | |
| "learning_rate": 4.582228256894093e-06, | |
| "loss": 0.4088, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 1.7252396166134185, | |
| "grad_norm": 0.8071309721816631, | |
| "learning_rate": 4.56370688392757e-06, | |
| "loss": 0.4265, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 1.7284345047923324, | |
| "grad_norm": 0.6983078682901872, | |
| "learning_rate": 4.545191541611933e-06, | |
| "loss": 0.4343, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 1.731629392971246, | |
| "grad_norm": 0.7064389255875589, | |
| "learning_rate": 4.526682485875044e-06, | |
| "loss": 0.4311, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 1.7348242811501597, | |
| "grad_norm": 0.686557281979509, | |
| "learning_rate": 4.508179972557875e-06, | |
| "loss": 0.4624, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 1.7380191693290734, | |
| "grad_norm": 0.6391560477651345, | |
| "learning_rate": 4.489684257410959e-06, | |
| "loss": 0.3948, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 1.741214057507987, | |
| "grad_norm": 0.7274004317464743, | |
| "learning_rate": 4.471195596090867e-06, | |
| "loss": 0.4383, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 1.744408945686901, | |
| "grad_norm": 0.671746811039263, | |
| "learning_rate": 4.452714244156667e-06, | |
| "loss": 0.4214, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 1.7476038338658149, | |
| "grad_norm": 0.711865936170259, | |
| "learning_rate": 4.434240457066388e-06, | |
| "loss": 0.4467, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 1.7507987220447285, | |
| "grad_norm": 0.6189430505826393, | |
| "learning_rate": 4.415774490173504e-06, | |
| "loss": 0.4564, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 1.7539936102236422, | |
| "grad_norm": 0.6910611899978649, | |
| "learning_rate": 4.397316598723385e-06, | |
| "loss": 0.3772, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 1.7571884984025559, | |
| "grad_norm": 0.6304070800723548, | |
| "learning_rate": 4.3788670378497836e-06, | |
| "loss": 0.4319, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 1.7603833865814695, | |
| "grad_norm": 0.7333431181761982, | |
| "learning_rate": 4.360426062571303e-06, | |
| "loss": 0.4113, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 1.7635782747603834, | |
| "grad_norm": 0.6622969583504559, | |
| "learning_rate": 4.341993927787871e-06, | |
| "loss": 0.4356, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 1.766773162939297, | |
| "grad_norm": 0.6471194513704625, | |
| "learning_rate": 4.323570888277215e-06, | |
| "loss": 0.3546, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 1.769968051118211, | |
| "grad_norm": 0.6810785974779886, | |
| "learning_rate": 4.305157198691351e-06, | |
| "loss": 0.4073, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 1.7731629392971247, | |
| "grad_norm": 0.7339503997464327, | |
| "learning_rate": 4.286753113553049e-06, | |
| "loss": 0.4656, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 1.7763578274760383, | |
| "grad_norm": 0.6489710558856068, | |
| "learning_rate": 4.268358887252322e-06, | |
| "loss": 0.3985, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 1.779552715654952, | |
| "grad_norm": 0.718662578260408, | |
| "learning_rate": 4.249974774042915e-06, | |
| "loss": 0.3576, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 1.7827476038338657, | |
| "grad_norm": 0.738057529915224, | |
| "learning_rate": 4.231601028038781e-06, | |
| "loss": 0.4163, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 1.7859424920127795, | |
| "grad_norm": 0.6705392426998533, | |
| "learning_rate": 4.2132379032105695e-06, | |
| "loss": 0.3962, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 1.7891373801916934, | |
| "grad_norm": 0.7591308369500366, | |
| "learning_rate": 4.194885653382128e-06, | |
| "loss": 0.4214, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 1.792332268370607, | |
| "grad_norm": 0.6420351490088527, | |
| "learning_rate": 4.176544532226974e-06, | |
| "loss": 0.423, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 1.7955271565495208, | |
| "grad_norm": 0.6691805563891343, | |
| "learning_rate": 4.158214793264808e-06, | |
| "loss": 0.4048, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 1.7987220447284344, | |
| "grad_norm": 0.795969904519421, | |
| "learning_rate": 4.139896689857995e-06, | |
| "loss": 0.4095, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 1.8019169329073481, | |
| "grad_norm": 0.6883215009844186, | |
| "learning_rate": 4.121590475208071e-06, | |
| "loss": 0.3969, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 1.805111821086262, | |
| "grad_norm": 0.636119055044423, | |
| "learning_rate": 4.1032964023522366e-06, | |
| "loss": 0.4361, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 1.8083067092651757, | |
| "grad_norm": 0.7202448770364853, | |
| "learning_rate": 4.085014724159866e-06, | |
| "loss": 0.3922, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 1.8115015974440896, | |
| "grad_norm": 0.6465617444869258, | |
| "learning_rate": 4.066745693329008e-06, | |
| "loss": 0.4258, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 1.8146964856230032, | |
| "grad_norm": 0.7002728972461814, | |
| "learning_rate": 4.0484895623828906e-06, | |
| "loss": 0.464, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 1.817891373801917, | |
| "grad_norm": 0.7864770283392628, | |
| "learning_rate": 4.030246583666437e-06, | |
| "loss": 0.4247, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 1.8210862619808306, | |
| "grad_norm": 0.6572550114907124, | |
| "learning_rate": 4.012017009342773e-06, | |
| "loss": 0.3674, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 1.8242811501597445, | |
| "grad_norm": 0.6482078420972358, | |
| "learning_rate": 3.99380109138974e-06, | |
| "loss": 0.426, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 1.8274760383386581, | |
| "grad_norm": 0.6089049101637908, | |
| "learning_rate": 3.97559908159642e-06, | |
| "loss": 0.4648, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 1.830670926517572, | |
| "grad_norm": 0.654869830004242, | |
| "learning_rate": 3.9574112315596425e-06, | |
| "loss": 0.4081, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 1.8338658146964857, | |
| "grad_norm": 0.6766372004787461, | |
| "learning_rate": 3.9392377926805226e-06, | |
| "loss": 0.3991, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 1.8370607028753994, | |
| "grad_norm": 0.6156484093536729, | |
| "learning_rate": 3.92107901616097e-06, | |
| "loss": 0.4051, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 1.840255591054313, | |
| "grad_norm": 0.7209332365753569, | |
| "learning_rate": 3.9029351530002264e-06, | |
| "loss": 0.4325, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 1.8434504792332267, | |
| "grad_norm": 0.6884876431700547, | |
| "learning_rate": 3.884806453991399e-06, | |
| "loss": 0.3899, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 1.8466453674121406, | |
| "grad_norm": 0.6895779669790771, | |
| "learning_rate": 3.866693169717982e-06, | |
| "loss": 0.4257, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 1.8498402555910545, | |
| "grad_norm": 0.591765095160724, | |
| "learning_rate": 3.848595550550401e-06, | |
| "loss": 0.4319, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 1.8530351437699681, | |
| "grad_norm": 0.6890579406960579, | |
| "learning_rate": 3.830513846642556e-06, | |
| "loss": 0.4109, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 1.8562300319488818, | |
| "grad_norm": 0.6475782916684093, | |
| "learning_rate": 3.8124483079283546e-06, | |
| "loss": 0.3852, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 1.8594249201277955, | |
| "grad_norm": 0.6079636893365562, | |
| "learning_rate": 3.7943991841182586e-06, | |
| "loss": 0.4199, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 1.8626198083067091, | |
| "grad_norm": 0.5989261528100488, | |
| "learning_rate": 3.7763667246958447e-06, | |
| "loss": 0.3634, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 1.865814696485623, | |
| "grad_norm": 0.7170408623649442, | |
| "learning_rate": 3.758351178914336e-06, | |
| "loss": 0.4269, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 1.8690095846645367, | |
| "grad_norm": 0.6802970749740577, | |
| "learning_rate": 3.7403527957931716e-06, | |
| "loss": 0.4, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 1.8722044728434506, | |
| "grad_norm": 0.7045362752006943, | |
| "learning_rate": 3.7223718241145646e-06, | |
| "loss": 0.4146, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 1.8753993610223643, | |
| "grad_norm": 0.7431509526456225, | |
| "learning_rate": 3.7044085124200517e-06, | |
| "loss": 0.3965, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 1.878594249201278, | |
| "grad_norm": 0.7345778010151496, | |
| "learning_rate": 3.6864631090070656e-06, | |
| "loss": 0.3875, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 1.8817891373801916, | |
| "grad_norm": 0.6242646310295729, | |
| "learning_rate": 3.668535861925509e-06, | |
| "loss": 0.3929, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 1.8849840255591053, | |
| "grad_norm": 0.807998432581044, | |
| "learning_rate": 3.650627018974312e-06, | |
| "loss": 0.4604, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 1.8881789137380192, | |
| "grad_norm": 0.7330025954364637, | |
| "learning_rate": 3.632736827698015e-06, | |
| "loss": 0.3935, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 1.891373801916933, | |
| "grad_norm": 0.6709313587984049, | |
| "learning_rate": 3.6148655353833518e-06, | |
| "loss": 0.4509, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 1.8945686900958467, | |
| "grad_norm": 0.5919122845314795, | |
| "learning_rate": 3.5970133890558184e-06, | |
| "loss": 0.3787, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 1.8977635782747604, | |
| "grad_norm": 0.6565056726940224, | |
| "learning_rate": 3.5791806354762702e-06, | |
| "loss": 0.4142, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 1.900958466453674, | |
| "grad_norm": 0.6240820286909862, | |
| "learning_rate": 3.5613675211375066e-06, | |
| "loss": 0.4014, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 1.9041533546325877, | |
| "grad_norm": 0.618594688160155, | |
| "learning_rate": 3.5435742922608618e-06, | |
| "loss": 0.4526, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 1.9073482428115016, | |
| "grad_norm": 0.6258816708488183, | |
| "learning_rate": 3.525801194792805e-06, | |
| "loss": 0.3905, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 1.9105431309904153, | |
| "grad_norm": 0.6273674561595595, | |
| "learning_rate": 3.508048474401541e-06, | |
| "loss": 0.421, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 1.9137380191693292, | |
| "grad_norm": 0.7106954736201235, | |
| "learning_rate": 3.4903163764736104e-06, | |
| "loss": 0.4552, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 1.9169329073482428, | |
| "grad_norm": 0.685935308297769, | |
| "learning_rate": 3.4726051461105016e-06, | |
| "loss": 0.4295, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 1.9201277955271565, | |
| "grad_norm": 0.7048971741463542, | |
| "learning_rate": 3.4549150281252635e-06, | |
| "loss": 0.4589, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 1.9233226837060702, | |
| "grad_norm": 0.6779199644574317, | |
| "learning_rate": 3.437246267039115e-06, | |
| "loss": 0.3862, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 1.926517571884984, | |
| "grad_norm": 0.6370062532929827, | |
| "learning_rate": 3.419599107078073e-06, | |
| "loss": 0.4318, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 1.9297124600638977, | |
| "grad_norm": 0.6664293929584083, | |
| "learning_rate": 3.401973792169574e-06, | |
| "loss": 0.4114, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 1.9329073482428116, | |
| "grad_norm": 0.6801661249853935, | |
| "learning_rate": 3.384370565939098e-06, | |
| "loss": 0.4056, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 1.9361022364217253, | |
| "grad_norm": 0.6088422098648698, | |
| "learning_rate": 3.3667896717068105e-06, | |
| "loss": 0.4354, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 1.939297124600639, | |
| "grad_norm": 0.604568022948054, | |
| "learning_rate": 3.34923135248419e-06, | |
| "loss": 0.4406, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 1.9424920127795526, | |
| "grad_norm": 0.6480032914789063, | |
| "learning_rate": 3.33169585097067e-06, | |
| "loss": 0.3947, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 1.9456869009584663, | |
| "grad_norm": 0.6813468114241855, | |
| "learning_rate": 3.314183409550293e-06, | |
| "loss": 0.4278, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 1.9488817891373802, | |
| "grad_norm": 0.6963996997813214, | |
| "learning_rate": 3.2966942702883494e-06, | |
| "loss": 0.4375, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 1.952076677316294, | |
| "grad_norm": 0.6666051546482046, | |
| "learning_rate": 3.279228674928035e-06, | |
| "loss": 0.3713, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 1.9552715654952078, | |
| "grad_norm": 0.6465525719691291, | |
| "learning_rate": 3.261786864887117e-06, | |
| "loss": 0.4502, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 1.9584664536741214, | |
| "grad_norm": 0.6823761874452812, | |
| "learning_rate": 3.244369081254585e-06, | |
| "loss": 0.4154, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 1.961661341853035, | |
| "grad_norm": 0.7074455898920645, | |
| "learning_rate": 3.226975564787322e-06, | |
| "loss": 0.4445, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 1.9648562300319488, | |
| "grad_norm": 0.767027927273249, | |
| "learning_rate": 3.209606555906788e-06, | |
| "loss": 0.3778, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 1.9680511182108626, | |
| "grad_norm": 0.7511580726189006, | |
| "learning_rate": 3.192262294695679e-06, | |
| "loss": 0.4058, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 1.9712460063897763, | |
| "grad_norm": 0.6400233820575995, | |
| "learning_rate": 3.174943020894618e-06, | |
| "loss": 0.4111, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 1.9744408945686902, | |
| "grad_norm": 0.6063251534219226, | |
| "learning_rate": 3.1576489738988457e-06, | |
| "loss": 0.3939, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 1.9776357827476039, | |
| "grad_norm": 0.7399192760884882, | |
| "learning_rate": 3.140380392754901e-06, | |
| "loss": 0.4171, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 1.9808306709265175, | |
| "grad_norm": 0.6044585985743741, | |
| "learning_rate": 3.12313751615732e-06, | |
| "loss": 0.4044, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 1.9840255591054312, | |
| "grad_norm": 0.6646425537074524, | |
| "learning_rate": 3.1059205824453446e-06, | |
| "loss": 0.4111, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 1.9872204472843449, | |
| "grad_norm": 0.7426412039388861, | |
| "learning_rate": 3.0887298295996183e-06, | |
| "loss": 0.4106, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 1.9904153354632588, | |
| "grad_norm": 0.7001604424708607, | |
| "learning_rate": 3.0715654952388957e-06, | |
| "loss": 0.4245, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 1.9936102236421727, | |
| "grad_norm": 0.6520937772952098, | |
| "learning_rate": 3.054427816616773e-06, | |
| "loss": 0.4505, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 1.9968051118210863, | |
| "grad_norm": 0.6491525580746756, | |
| "learning_rate": 3.0373170306183885e-06, | |
| "loss": 0.3938, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "grad_norm": 0.6087684003574786, | |
| "learning_rate": 3.020233373757162e-06, | |
| "loss": 0.3807, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 2.0031948881789137, | |
| "grad_norm": 0.796879751420443, | |
| "learning_rate": 3.0031770821715233e-06, | |
| "loss": 0.3505, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 2.0063897763578273, | |
| "grad_norm": 0.6970718865013494, | |
| "learning_rate": 2.9861483916216404e-06, | |
| "loss": 0.3527, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 2.009584664536741, | |
| "grad_norm": 0.7046188469203205, | |
| "learning_rate": 2.969147537486175e-06, | |
| "loss": 0.3574, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 2.012779552715655, | |
| "grad_norm": 0.6005350583532081, | |
| "learning_rate": 2.952174754759012e-06, | |
| "loss": 0.3127, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 2.015974440894569, | |
| "grad_norm": 0.7209443417400784, | |
| "learning_rate": 2.935230278046025e-06, | |
| "loss": 0.3729, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 2.0191693290734825, | |
| "grad_norm": 0.8211482482920919, | |
| "learning_rate": 2.9183143415618297e-06, | |
| "loss": 0.319, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 2.022364217252396, | |
| "grad_norm": 0.809977750202282, | |
| "learning_rate": 2.9014271791265403e-06, | |
| "loss": 0.3497, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 2.02555910543131, | |
| "grad_norm": 0.6927159562036598, | |
| "learning_rate": 2.8845690241625437e-06, | |
| "loss": 0.3024, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 2.0287539936102235, | |
| "grad_norm": 0.668355825951265, | |
| "learning_rate": 2.867740109691277e-06, | |
| "loss": 0.3549, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 2.0319488817891376, | |
| "grad_norm": 0.6321723989880009, | |
| "learning_rate": 2.850940668329996e-06, | |
| "loss": 0.33, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 2.0351437699680512, | |
| "grad_norm": 0.6012009576345978, | |
| "learning_rate": 2.8341709322885624e-06, | |
| "loss": 0.3694, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 2.038338658146965, | |
| "grad_norm": 0.6253436415906143, | |
| "learning_rate": 2.817431133366246e-06, | |
| "loss": 0.3398, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 2.0415335463258786, | |
| "grad_norm": 0.6206197586366787, | |
| "learning_rate": 2.800721502948506e-06, | |
| "loss": 0.3874, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 2.0447284345047922, | |
| "grad_norm": 0.7037107523881407, | |
| "learning_rate": 2.7840422720037943e-06, | |
| "loss": 0.3482, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 2.047923322683706, | |
| "grad_norm": 0.6125405702932848, | |
| "learning_rate": 2.767393671080376e-06, | |
| "loss": 0.3626, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 2.0511182108626196, | |
| "grad_norm": 0.6626098393166985, | |
| "learning_rate": 2.7507759303031257e-06, | |
| "loss": 0.355, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 2.0543130990415337, | |
| "grad_norm": 0.6089018851505401, | |
| "learning_rate": 2.7341892793703594e-06, | |
| "loss": 0.3335, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 2.0575079872204474, | |
| "grad_norm": 0.6251075562703512, | |
| "learning_rate": 2.7176339475506515e-06, | |
| "loss": 0.3276, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 2.060702875399361, | |
| "grad_norm": 0.6414220113333703, | |
| "learning_rate": 2.7011101636796677e-06, | |
| "loss": 0.3414, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 2.0638977635782747, | |
| "grad_norm": 0.6394774601567328, | |
| "learning_rate": 2.6846181561570085e-06, | |
| "loss": 0.3493, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 2.0670926517571884, | |
| "grad_norm": 0.6872192296218651, | |
| "learning_rate": 2.668158152943039e-06, | |
| "loss": 0.3652, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 2.070287539936102, | |
| "grad_norm": 0.6476410999573231, | |
| "learning_rate": 2.651730381555754e-06, | |
| "loss": 0.3551, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 2.073482428115016, | |
| "grad_norm": 0.654984785941934, | |
| "learning_rate": 2.635335069067617e-06, | |
| "loss": 0.3163, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 2.07667731629393, | |
| "grad_norm": 0.6008860032389618, | |
| "learning_rate": 2.618972442102432e-06, | |
| "loss": 0.3125, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 2.0798722044728435, | |
| "grad_norm": 0.5630007847350497, | |
| "learning_rate": 2.602642726832212e-06, | |
| "loss": 0.3599, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 2.083067092651757, | |
| "grad_norm": 0.6548423010020595, | |
| "learning_rate": 2.5863461489740403e-06, | |
| "loss": 0.3545, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 2.086261980830671, | |
| "grad_norm": 0.720979078940375, | |
| "learning_rate": 2.57008293378697e-06, | |
| "loss": 0.3461, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 2.0894568690095845, | |
| "grad_norm": 0.6131533408331629, | |
| "learning_rate": 2.553853306068888e-06, | |
| "loss": 0.3184, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 2.0926517571884986, | |
| "grad_norm": 0.7007832032216074, | |
| "learning_rate": 2.5376574901534303e-06, | |
| "loss": 0.328, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 2.0958466453674123, | |
| "grad_norm": 0.6545588991789442, | |
| "learning_rate": 2.5214957099068613e-06, | |
| "loss": 0.2982, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 2.099041533546326, | |
| "grad_norm": 0.5863148928130069, | |
| "learning_rate": 2.5053681887249916e-06, | |
| "loss": 0.3363, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 2.1022364217252396, | |
| "grad_norm": 0.5301073800260551, | |
| "learning_rate": 2.4892751495300893e-06, | |
| "loss": 0.3115, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 2.1054313099041533, | |
| "grad_norm": 0.6542479327855456, | |
| "learning_rate": 2.4732168147677927e-06, | |
| "loss": 0.3703, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 2.108626198083067, | |
| "grad_norm": 0.5886520139377257, | |
| "learning_rate": 2.4571934064040364e-06, | |
| "loss": 0.3059, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 2.1118210862619806, | |
| "grad_norm": 0.6484195186593826, | |
| "learning_rate": 2.4412051459219945e-06, | |
| "loss": 0.3342, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 2.1150159744408947, | |
| "grad_norm": 0.6221889551743696, | |
| "learning_rate": 2.425252254319002e-06, | |
| "loss": 0.3322, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 2.1182108626198084, | |
| "grad_norm": 0.6267777822744369, | |
| "learning_rate": 2.4093349521035105e-06, | |
| "loss": 0.3322, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 2.121405750798722, | |
| "grad_norm": 0.6167910293671043, | |
| "learning_rate": 2.3934534592920416e-06, | |
| "loss": 0.3397, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 2.1246006389776357, | |
| "grad_norm": 0.624875888457256, | |
| "learning_rate": 2.3776079954061385e-06, | |
| "loss": 0.3193, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 2.1277955271565494, | |
| "grad_norm": 0.5365887324860252, | |
| "learning_rate": 2.3617987794693358e-06, | |
| "loss": 0.3103, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 2.130990415335463, | |
| "grad_norm": 0.5822223965195731, | |
| "learning_rate": 2.3460260300041355e-06, | |
| "loss": 0.3798, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 2.134185303514377, | |
| "grad_norm": 0.5880605470571858, | |
| "learning_rate": 2.3302899650289773e-06, | |
| "loss": 0.317, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 2.137380191693291, | |
| "grad_norm": 0.6029997324505392, | |
| "learning_rate": 2.314590802055232e-06, | |
| "loss": 0.3361, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 2.1405750798722045, | |
| "grad_norm": 0.6879520601227666, | |
| "learning_rate": 2.2989287580841985e-06, | |
| "loss": 0.3282, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 2.143769968051118, | |
| "grad_norm": 0.644984182089056, | |
| "learning_rate": 2.2833040496040925e-06, | |
| "loss": 0.3991, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 2.146964856230032, | |
| "grad_norm": 0.6134578939862448, | |
| "learning_rate": 2.267716892587062e-06, | |
| "loss": 0.3324, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 2.1501597444089455, | |
| "grad_norm": 0.619449643912175, | |
| "learning_rate": 2.252167502486205e-06, | |
| "loss": 0.3411, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 2.1533546325878596, | |
| "grad_norm": 0.5532087352133281, | |
| "learning_rate": 2.2366560942325833e-06, | |
| "loss": 0.3841, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 2.1565495207667733, | |
| "grad_norm": 0.574888067545758, | |
| "learning_rate": 2.2211828822322547e-06, | |
| "loss": 0.3869, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 2.159744408945687, | |
| "grad_norm": 0.5891390751264961, | |
| "learning_rate": 2.205748080363316e-06, | |
| "loss": 0.319, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 2.1629392971246006, | |
| "grad_norm": 0.5512648483481606, | |
| "learning_rate": 2.190351901972935e-06, | |
| "loss": 0.326, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 2.1661341853035143, | |
| "grad_norm": 0.6477399909641972, | |
| "learning_rate": 2.1749945598744076e-06, | |
| "loss": 0.2812, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 2.169329073482428, | |
| "grad_norm": 0.5713512138383214, | |
| "learning_rate": 2.159676266344222e-06, | |
| "loss": 0.312, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 2.1725239616613417, | |
| "grad_norm": 0.5926489667194774, | |
| "learning_rate": 2.144397233119112e-06, | |
| "loss": 0.3057, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 2.1757188498402558, | |
| "grad_norm": 0.6329817251734594, | |
| "learning_rate": 2.1291576713931382e-06, | |
| "loss": 0.3277, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 2.1789137380191694, | |
| "grad_norm": 0.662841400229032, | |
| "learning_rate": 2.1139577918147715e-06, | |
| "loss": 0.3495, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 2.182108626198083, | |
| "grad_norm": 0.5585397178700957, | |
| "learning_rate": 2.0987978044839707e-06, | |
| "loss": 0.3369, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 2.1853035143769968, | |
| "grad_norm": 0.6043833320321034, | |
| "learning_rate": 2.0836779189492925e-06, | |
| "loss": 0.3818, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 2.1884984025559104, | |
| "grad_norm": 0.6296409627632911, | |
| "learning_rate": 2.068598344204981e-06, | |
| "loss": 0.3485, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 2.191693290734824, | |
| "grad_norm": 0.5920218690789573, | |
| "learning_rate": 2.053559288688086e-06, | |
| "loss": 0.3341, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 2.194888178913738, | |
| "grad_norm": 0.5980200487693809, | |
| "learning_rate": 2.0385609602755878e-06, | |
| "loss": 0.3602, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 2.198083067092652, | |
| "grad_norm": 0.573442518449572, | |
| "learning_rate": 2.02360356628151e-06, | |
| "loss": 0.3478, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 2.2012779552715656, | |
| "grad_norm": 0.6391131250417024, | |
| "learning_rate": 2.0086873134540626e-06, | |
| "loss": 0.3702, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 2.2044728434504792, | |
| "grad_norm": 0.5708177643091218, | |
| "learning_rate": 1.9938124079727874e-06, | |
| "loss": 0.4001, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 2.207667731629393, | |
| "grad_norm": 0.5727453414449702, | |
| "learning_rate": 1.9789790554456977e-06, | |
| "loss": 0.3731, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 2.2108626198083066, | |
| "grad_norm": 0.5379357735943615, | |
| "learning_rate": 1.9641874609064443e-06, | |
| "loss": 0.2939, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 2.2140575079872207, | |
| "grad_norm": 0.5871823007917029, | |
| "learning_rate": 1.9494378288114816e-06, | |
| "loss": 0.3367, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 2.2172523961661343, | |
| "grad_norm": 0.5545788841888406, | |
| "learning_rate": 1.9347303630372373e-06, | |
| "loss": 0.3841, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 2.220447284345048, | |
| "grad_norm": 0.5809745870760499, | |
| "learning_rate": 1.9200652668772924e-06, | |
| "loss": 0.3744, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 2.2236421725239617, | |
| "grad_norm": 0.61731448322226, | |
| "learning_rate": 1.9054427430395828e-06, | |
| "loss": 0.3337, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 2.2268370607028753, | |
| "grad_norm": 0.6443806470537478, | |
| "learning_rate": 1.890862993643583e-06, | |
| "loss": 0.3178, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 2.230031948881789, | |
| "grad_norm": 0.5521573052667518, | |
| "learning_rate": 1.8763262202175204e-06, | |
| "loss": 0.3031, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 2.2332268370607027, | |
| "grad_norm": 0.6068058013107389, | |
| "learning_rate": 1.8618326236955908e-06, | |
| "loss": 0.3506, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 2.236421725239617, | |
| "grad_norm": 0.7265520680014468, | |
| "learning_rate": 1.8473824044151762e-06, | |
| "loss": 0.3467, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 2.2396166134185305, | |
| "grad_norm": 0.5898120244137373, | |
| "learning_rate": 1.8329757621140748e-06, | |
| "loss": 0.2709, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 2.242811501597444, | |
| "grad_norm": 0.5982923461568086, | |
| "learning_rate": 1.81861289592775e-06, | |
| "loss": 0.359, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 2.246006389776358, | |
| "grad_norm": 0.67972593546476, | |
| "learning_rate": 1.8042940043865658e-06, | |
| "loss": 0.3312, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 2.2492012779552715, | |
| "grad_norm": 0.5901426297699548, | |
| "learning_rate": 1.7900192854130465e-06, | |
| "loss": 0.3302, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 2.252396166134185, | |
| "grad_norm": 0.60108804841194, | |
| "learning_rate": 1.7757889363191484e-06, | |
| "loss": 0.3719, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 2.255591054313099, | |
| "grad_norm": 0.6096808438842122, | |
| "learning_rate": 1.7616031538035189e-06, | |
| "loss": 0.3416, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 2.258785942492013, | |
| "grad_norm": 0.5925477125196335, | |
| "learning_rate": 1.7474621339487925e-06, | |
| "loss": 0.2894, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 2.2619808306709266, | |
| "grad_norm": 0.614301208774155, | |
| "learning_rate": 1.7333660722188667e-06, | |
| "loss": 0.3121, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 2.2651757188498403, | |
| "grad_norm": 0.6007739770559021, | |
| "learning_rate": 1.7193151634562071e-06, | |
| "loss": 0.3032, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 2.268370607028754, | |
| "grad_norm": 0.5962665238863614, | |
| "learning_rate": 1.7053096018791588e-06, | |
| "loss": 0.3448, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 2.2715654952076676, | |
| "grad_norm": 0.6320189634924153, | |
| "learning_rate": 1.691349581079249e-06, | |
| "loss": 0.3498, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 2.2747603833865817, | |
| "grad_norm": 0.5508132536324674, | |
| "learning_rate": 1.6774352940185269e-06, | |
| "loss": 0.319, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 2.2779552715654954, | |
| "grad_norm": 0.5831514260830296, | |
| "learning_rate": 1.663566933026879e-06, | |
| "loss": 0.2965, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 2.281150159744409, | |
| "grad_norm": 0.6756524107691178, | |
| "learning_rate": 1.6497446897993885e-06, | |
| "loss": 0.3431, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 2.2843450479233227, | |
| "grad_norm": 0.629012585363301, | |
| "learning_rate": 1.6359687553936714e-06, | |
| "loss": 0.3044, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 2.2875399361022364, | |
| "grad_norm": 0.5674347593064819, | |
| "learning_rate": 1.6222393202272414e-06, | |
| "loss": 0.3367, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 2.29073482428115, | |
| "grad_norm": 0.5972708434410853, | |
| "learning_rate": 1.6085565740748825e-06, | |
| "loss": 0.3704, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 2.2939297124600637, | |
| "grad_norm": 0.6370082376546686, | |
| "learning_rate": 1.5949207060660138e-06, | |
| "loss": 0.331, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 2.297124600638978, | |
| "grad_norm": 0.6145160755163914, | |
| "learning_rate": 1.581331904682089e-06, | |
| "loss": 0.2636, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 2.3003194888178915, | |
| "grad_norm": 0.5424418826013051, | |
| "learning_rate": 1.5677903577539806e-06, | |
| "loss": 0.3342, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 2.303514376996805, | |
| "grad_norm": 0.5583080472065144, | |
| "learning_rate": 1.5542962524593869e-06, | |
| "loss": 0.3383, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 2.306709265175719, | |
| "grad_norm": 0.5412287113161306, | |
| "learning_rate": 1.54084977532025e-06, | |
| "loss": 0.292, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 2.3099041533546325, | |
| "grad_norm": 0.6030318899286576, | |
| "learning_rate": 1.5274511122001684e-06, | |
| "loss": 0.3022, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 2.313099041533546, | |
| "grad_norm": 0.5405867270565389, | |
| "learning_rate": 1.5141004483018323e-06, | |
| "loss": 0.3472, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 2.31629392971246, | |
| "grad_norm": 0.6308143569571145, | |
| "learning_rate": 1.5007979681644696e-06, | |
| "loss": 0.3837, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 2.319488817891374, | |
| "grad_norm": 0.5826126513421058, | |
| "learning_rate": 1.4875438556612836e-06, | |
| "loss": 0.3448, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 2.3226837060702876, | |
| "grad_norm": 0.6221338096279452, | |
| "learning_rate": 1.474338293996917e-06, | |
| "loss": 0.3381, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 2.3258785942492013, | |
| "grad_norm": 0.6151988705616945, | |
| "learning_rate": 1.4611814657049257e-06, | |
| "loss": 0.3384, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 2.329073482428115, | |
| "grad_norm": 0.5958140430045699, | |
| "learning_rate": 1.4480735526452427e-06, | |
| "loss": 0.3172, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 2.3322683706070286, | |
| "grad_norm": 0.641656670755964, | |
| "learning_rate": 1.4350147360016743e-06, | |
| "loss": 0.3036, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 2.3354632587859427, | |
| "grad_norm": 0.5444493599912078, | |
| "learning_rate": 1.4220051962793952e-06, | |
| "loss": 0.3685, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 2.3386581469648564, | |
| "grad_norm": 0.6108367672947721, | |
| "learning_rate": 1.4090451133024473e-06, | |
| "loss": 0.3594, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 2.34185303514377, | |
| "grad_norm": 0.5799568994930356, | |
| "learning_rate": 1.3961346662112585e-06, | |
| "loss": 0.3455, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 2.3450479233226837, | |
| "grad_norm": 0.5895952335522295, | |
| "learning_rate": 1.3832740334601692e-06, | |
| "loss": 0.3454, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 2.3482428115015974, | |
| "grad_norm": 0.5631879419117699, | |
| "learning_rate": 1.3704633928149575e-06, | |
| "loss": 0.3494, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 2.351437699680511, | |
| "grad_norm": 0.5764377352709202, | |
| "learning_rate": 1.3577029213503911e-06, | |
| "loss": 0.3253, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 2.3546325878594248, | |
| "grad_norm": 0.5881785734208798, | |
| "learning_rate": 1.3449927954477732e-06, | |
| "loss": 0.3962, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 2.357827476038339, | |
| "grad_norm": 0.6095133753271015, | |
| "learning_rate": 1.3323331907925046e-06, | |
| "loss": 0.323, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 2.3610223642172525, | |
| "grad_norm": 0.588572224896148, | |
| "learning_rate": 1.319724282371664e-06, | |
| "loss": 0.3189, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 2.364217252396166, | |
| "grad_norm": 0.5882639679146385, | |
| "learning_rate": 1.307166244471576e-06, | |
| "loss": 0.3057, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 2.36741214057508, | |
| "grad_norm": 0.6100840603730344, | |
| "learning_rate": 1.2946592506754097e-06, | |
| "loss": 0.3168, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 2.3706070287539935, | |
| "grad_norm": 0.531991241724588, | |
| "learning_rate": 1.282203473860783e-06, | |
| "loss": 0.3474, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 2.373801916932907, | |
| "grad_norm": 0.5856809075810936, | |
| "learning_rate": 1.2697990861973635e-06, | |
| "loss": 0.3608, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 2.376996805111821, | |
| "grad_norm": 0.6568708624300119, | |
| "learning_rate": 1.257446259144494e-06, | |
| "loss": 0.3243, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 2.380191693290735, | |
| "grad_norm": 0.5645315607240472, | |
| "learning_rate": 1.2451451634488264e-06, | |
| "loss": 0.3398, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 2.3833865814696487, | |
| "grad_norm": 0.6351309420537445, | |
| "learning_rate": 1.2328959691419517e-06, | |
| "loss": 0.3241, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 2.3865814696485623, | |
| "grad_norm": 0.6014845684937911, | |
| "learning_rate": 1.2206988455380558e-06, | |
| "loss": 0.3881, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 2.389776357827476, | |
| "grad_norm": 0.5738972638891255, | |
| "learning_rate": 1.2085539612315844e-06, | |
| "loss": 0.3812, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 2.3929712460063897, | |
| "grad_norm": 0.6190610431226268, | |
| "learning_rate": 1.1964614840949002e-06, | |
| "loss": 0.2771, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 2.3961661341853033, | |
| "grad_norm": 0.6683751164215797, | |
| "learning_rate": 1.1844215812759708e-06, | |
| "loss": 0.3456, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 2.3993610223642174, | |
| "grad_norm": 0.5735299842922226, | |
| "learning_rate": 1.1724344191960591e-06, | |
| "loss": 0.3288, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 2.402555910543131, | |
| "grad_norm": 0.5263173276571057, | |
| "learning_rate": 1.1605001635474183e-06, | |
| "loss": 0.3329, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 2.405750798722045, | |
| "grad_norm": 0.546286668746822, | |
| "learning_rate": 1.1486189792910024e-06, | |
| "loss": 0.3392, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 2.4089456869009584, | |
| "grad_norm": 0.5782538648080976, | |
| "learning_rate": 1.1367910306541918e-06, | |
| "loss": 0.3269, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 2.412140575079872, | |
| "grad_norm": 0.6093496989550087, | |
| "learning_rate": 1.1250164811285148e-06, | |
| "loss": 0.3638, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 2.415335463258786, | |
| "grad_norm": 0.5735979784153206, | |
| "learning_rate": 1.1132954934673911e-06, | |
| "loss": 0.3791, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 2.4185303514377, | |
| "grad_norm": 0.5890688098840547, | |
| "learning_rate": 1.1016282296838887e-06, | |
| "loss": 0.32, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 2.4217252396166136, | |
| "grad_norm": 0.5863770137863203, | |
| "learning_rate": 1.090014851048473e-06, | |
| "loss": 0.3568, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 2.4249201277955272, | |
| "grad_norm": 0.6198422389021179, | |
| "learning_rate": 1.078455518086784e-06, | |
| "loss": 0.3503, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 2.428115015974441, | |
| "grad_norm": 0.6182003603538643, | |
| "learning_rate": 1.0669503905774198e-06, | |
| "loss": 0.276, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 2.4313099041533546, | |
| "grad_norm": 0.5741243544821852, | |
| "learning_rate": 1.055499627549722e-06, | |
| "loss": 0.3543, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 2.4345047923322682, | |
| "grad_norm": 0.6100024382594507, | |
| "learning_rate": 1.0441033872815804e-06, | |
| "loss": 0.3125, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 2.437699680511182, | |
| "grad_norm": 0.5573545364538265, | |
| "learning_rate": 1.0327618272972484e-06, | |
| "loss": 0.3568, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 2.440894568690096, | |
| "grad_norm": 0.5804442588333518, | |
| "learning_rate": 1.0214751043651582e-06, | |
| "loss": 0.3516, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 2.4440894568690097, | |
| "grad_norm": 0.5645715954304487, | |
| "learning_rate": 1.010243374495763e-06, | |
| "loss": 0.3364, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 2.4472843450479234, | |
| "grad_norm": 0.607743565693485, | |
| "learning_rate": 9.990667929393715e-07, | |
| "loss": 0.3504, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 2.450479233226837, | |
| "grad_norm": 0.5358496244943224, | |
| "learning_rate": 9.879455141840067e-07, | |
| "loss": 0.3134, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 2.4536741214057507, | |
| "grad_norm": 0.5391020845721134, | |
| "learning_rate": 9.768796919532742e-07, | |
| "loss": 0.3583, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 2.4568690095846644, | |
| "grad_norm": 0.6072255362664878, | |
| "learning_rate": 9.658694792042284e-07, | |
| "loss": 0.3071, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 2.460063897763578, | |
| "grad_norm": 0.5743396369605449, | |
| "learning_rate": 9.549150281252633e-07, | |
| "loss": 0.3462, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 2.463258785942492, | |
| "grad_norm": 0.5900799064692686, | |
| "learning_rate": 9.440164901340127e-07, | |
| "loss": 0.3449, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 2.466453674121406, | |
| "grad_norm": 0.6200473626128713, | |
| "learning_rate": 9.331740158752495e-07, | |
| "loss": 0.319, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 2.4696485623003195, | |
| "grad_norm": 0.548623410679532, | |
| "learning_rate": 9.223877552188065e-07, | |
| "loss": 0.3401, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 2.472843450479233, | |
| "grad_norm": 0.6543995742480747, | |
| "learning_rate": 9.116578572575091e-07, | |
| "loss": 0.285, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 2.476038338658147, | |
| "grad_norm": 0.5299382551555714, | |
| "learning_rate": 9.009844703051063e-07, | |
| "loss": 0.3277, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 2.479233226837061, | |
| "grad_norm": 0.8119843967078852, | |
| "learning_rate": 8.903677418942292e-07, | |
| "loss": 0.3512, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 2.4824281150159746, | |
| "grad_norm": 0.6285438913643876, | |
| "learning_rate": 8.79807818774343e-07, | |
| "loss": 0.2908, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 2.4856230031948883, | |
| "grad_norm": 0.585663977782034, | |
| "learning_rate": 8.693048469097293e-07, | |
| "loss": 0.389, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 2.488817891373802, | |
| "grad_norm": 0.6289592265394592, | |
| "learning_rate": 8.58858971477457e-07, | |
| "loss": 0.3521, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 2.4920127795527156, | |
| "grad_norm": 0.6366693928238198, | |
| "learning_rate": 8.484703368653812e-07, | |
| "loss": 0.3701, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 2.4952076677316293, | |
| "grad_norm": 0.5782660026502604, | |
| "learning_rate": 8.381390866701517e-07, | |
| "loss": 0.3195, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 2.498402555910543, | |
| "grad_norm": 0.5832900554761841, | |
| "learning_rate": 8.278653636952177e-07, | |
| "loss": 0.378, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 2.501597444089457, | |
| "grad_norm": 0.5684336949228438, | |
| "learning_rate": 8.176493099488664e-07, | |
| "loss": 0.3276, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 2.5047923322683707, | |
| "grad_norm": 0.5609191253603198, | |
| "learning_rate": 8.074910666422475e-07, | |
| "loss": 0.3247, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 2.5079872204472844, | |
| "grad_norm": 0.5438570856558302, | |
| "learning_rate": 7.973907741874287e-07, | |
| "loss": 0.351, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 2.511182108626198, | |
| "grad_norm": 0.6219071230831505, | |
| "learning_rate": 7.873485721954572e-07, | |
| "loss": 0.2938, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 2.5143769968051117, | |
| "grad_norm": 0.5617327223895177, | |
| "learning_rate": 7.773645994744222e-07, | |
| "loss": 0.335, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 2.5175718849840254, | |
| "grad_norm": 0.5473340299935876, | |
| "learning_rate": 7.674389940275406e-07, | |
| "loss": 0.3505, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 2.520766773162939, | |
| "grad_norm": 0.5651021204281824, | |
| "learning_rate": 7.575718930512516e-07, | |
| "loss": 0.2984, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 2.523961661341853, | |
| "grad_norm": 0.5707528680423285, | |
| "learning_rate": 7.47763432933315e-07, | |
| "loss": 0.3519, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 2.527156549520767, | |
| "grad_norm": 0.5768375956828472, | |
| "learning_rate": 7.380137492509309e-07, | |
| "loss": 0.2979, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 2.5303514376996805, | |
| "grad_norm": 0.5307202912335556, | |
| "learning_rate": 7.283229767688627e-07, | |
| "loss": 0.3365, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 2.533546325878594, | |
| "grad_norm": 0.5711642876819738, | |
| "learning_rate": 7.186912494375736e-07, | |
| "loss": 0.3803, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 2.536741214057508, | |
| "grad_norm": 0.5349026632263273, | |
| "learning_rate": 7.091187003913802e-07, | |
| "loss": 0.3239, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 2.539936102236422, | |
| "grad_norm": 0.583012121489221, | |
| "learning_rate": 6.996054619466053e-07, | |
| "loss": 0.3392, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 2.543130990415335, | |
| "grad_norm": 0.5609705350332282, | |
| "learning_rate": 6.901516655997536e-07, | |
| "loss": 0.4193, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 2.5463258785942493, | |
| "grad_norm": 0.594967536141309, | |
| "learning_rate": 6.80757442025694e-07, | |
| "loss": 0.3052, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 2.549520766773163, | |
| "grad_norm": 0.5940460186489407, | |
| "learning_rate": 6.714229210758516e-07, | |
| "loss": 0.3529, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 2.5527156549520766, | |
| "grad_norm": 0.5612092748543441, | |
| "learning_rate": 6.621482317764105e-07, | |
| "loss": 0.3472, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 2.5559105431309903, | |
| "grad_norm": 0.5679709101998861, | |
| "learning_rate": 6.529335023265387e-07, | |
| "loss": 0.3357, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 2.559105431309904, | |
| "grad_norm": 0.5440415384971896, | |
| "learning_rate": 6.437788600966066e-07, | |
| "loss": 0.3111, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 2.562300319488818, | |
| "grad_norm": 0.5964765736359275, | |
| "learning_rate": 6.346844316264312e-07, | |
| "loss": 0.3534, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 2.5654952076677318, | |
| "grad_norm": 0.6245549454681938, | |
| "learning_rate": 6.256503426235277e-07, | |
| "loss": 0.3279, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 2.5686900958466454, | |
| "grad_norm": 0.5850388477288754, | |
| "learning_rate": 6.166767179613691e-07, | |
| "loss": 0.3099, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 2.571884984025559, | |
| "grad_norm": 0.5637280336385339, | |
| "learning_rate": 6.077636816776611e-07, | |
| "loss": 0.2819, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 2.5750798722044728, | |
| "grad_norm": 0.6176991831171035, | |
| "learning_rate": 5.989113569726312e-07, | |
| "loss": 0.3039, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 2.5782747603833864, | |
| "grad_norm": 0.71187271410236, | |
| "learning_rate": 5.901198662073188e-07, | |
| "loss": 0.315, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 2.5814696485623, | |
| "grad_norm": 0.5821053609324206, | |
| "learning_rate": 5.813893309018881e-07, | |
| "loss": 0.3552, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 2.584664536741214, | |
| "grad_norm": 0.6223346311833248, | |
| "learning_rate": 5.727198717339511e-07, | |
| "loss": 0.2929, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 2.587859424920128, | |
| "grad_norm": 0.584315668054316, | |
| "learning_rate": 5.641116085368931e-07, | |
| "loss": 0.317, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 2.5910543130990416, | |
| "grad_norm": 0.5746440164268632, | |
| "learning_rate": 5.555646602982207e-07, | |
| "loss": 0.4032, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 2.594249201277955, | |
| "grad_norm": 0.587232658805564, | |
| "learning_rate": 5.470791451579172e-07, | |
| "loss": 0.3184, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 2.597444089456869, | |
| "grad_norm": 0.5864420347378898, | |
| "learning_rate": 5.386551804068063e-07, | |
| "loss": 0.3602, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 2.600638977635783, | |
| "grad_norm": 0.5302312882381615, | |
| "learning_rate": 5.302928824849335e-07, | |
| "loss": 0.3254, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 2.6038338658146962, | |
| "grad_norm": 0.6004537769625552, | |
| "learning_rate": 5.219923669799587e-07, | |
| "loss": 0.3542, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 2.6070287539936103, | |
| "grad_norm": 0.6579884319558386, | |
| "learning_rate": 5.137537486255517e-07, | |
| "loss": 0.3635, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 2.610223642172524, | |
| "grad_norm": 0.5717327429721044, | |
| "learning_rate": 5.055771412998122e-07, | |
| "loss": 0.355, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 2.6134185303514377, | |
| "grad_norm": 0.5435195536221277, | |
| "learning_rate": 4.974626580236957e-07, | |
| "loss": 0.3727, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 2.6166134185303513, | |
| "grad_norm": 0.5452473807702689, | |
| "learning_rate": 4.894104109594466e-07, | |
| "loss": 0.3415, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 2.619808306709265, | |
| "grad_norm": 0.5614197697797106, | |
| "learning_rate": 4.814205114090543e-07, | |
| "loss": 0.353, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 2.623003194888179, | |
| "grad_norm": 0.6036099876529709, | |
| "learning_rate": 4.734930698127077e-07, | |
| "loss": 0.3329, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 2.626198083067093, | |
| "grad_norm": 0.5610318957311481, | |
| "learning_rate": 4.6562819574727304e-07, | |
| "loss": 0.3557, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 2.6293929712460065, | |
| "grad_norm": 0.525351462374241, | |
| "learning_rate": 4.578259979247801e-07, | |
| "loss": 0.3385, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 2.63258785942492, | |
| "grad_norm": 0.6776441821920868, | |
| "learning_rate": 4.500865841909169e-07, | |
| "loss": 0.342, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 2.635782747603834, | |
| "grad_norm": 0.5299009647564896, | |
| "learning_rate": 4.4241006152353885e-07, | |
| "loss": 0.3793, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 2.6389776357827475, | |
| "grad_norm": 0.5369364999737113, | |
| "learning_rate": 4.3479653603119287e-07, | |
| "loss": 0.381, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 2.642172523961661, | |
| "grad_norm": 0.5920800154248469, | |
| "learning_rate": 4.2724611295164755e-07, | |
| "loss": 0.3242, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 2.6453674121405752, | |
| "grad_norm": 0.5663358573294414, | |
| "learning_rate": 4.197588966504401e-07, | |
| "loss": 0.3463, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 2.648562300319489, | |
| "grad_norm": 0.5471794089375958, | |
| "learning_rate": 4.123349906194357e-07, | |
| "loss": 0.3375, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 2.6517571884984026, | |
| "grad_norm": 0.6014709675824612, | |
| "learning_rate": 4.0497449747539217e-07, | |
| "loss": 0.3686, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 2.6549520766773163, | |
| "grad_norm": 0.6082660885321246, | |
| "learning_rate": 3.9767751895854467e-07, | |
| "loss": 0.3165, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 2.65814696485623, | |
| "grad_norm": 0.5376902168917983, | |
| "learning_rate": 3.904441559312006e-07, | |
| "loss": 0.3175, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 2.661341853035144, | |
| "grad_norm": 0.5349176977828113, | |
| "learning_rate": 3.8327450837634284e-07, | |
| "loss": 0.3153, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 2.6645367412140573, | |
| "grad_norm": 0.5793405702321877, | |
| "learning_rate": 3.7616867539624733e-07, | |
| "loss": 0.3516, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 2.6677316293929714, | |
| "grad_norm": 0.5410240793370933, | |
| "learning_rate": 3.691267552111183e-07, | |
| "loss": 0.4026, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 2.670926517571885, | |
| "grad_norm": 0.5694849372788428, | |
| "learning_rate": 3.621488451577221e-07, | |
| "loss": 0.3407, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 2.6741214057507987, | |
| "grad_norm": 0.5975521361869505, | |
| "learning_rate": 3.552350416880507e-07, | |
| "loss": 0.3373, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 2.6773162939297124, | |
| "grad_norm": 0.569768311072208, | |
| "learning_rate": 3.483854403679832e-07, | |
| "loss": 0.3297, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 2.680511182108626, | |
| "grad_norm": 0.6903061046518503, | |
| "learning_rate": 3.416001358759635e-07, | |
| "loss": 0.2955, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 2.68370607028754, | |
| "grad_norm": 0.5841017513426691, | |
| "learning_rate": 3.3487922200169944e-07, | |
| "loss": 0.358, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 2.686900958466454, | |
| "grad_norm": 0.545896489919343, | |
| "learning_rate": 3.2822279164485494e-07, | |
| "loss": 0.3632, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 2.6900958466453675, | |
| "grad_norm": 0.6588046443317735, | |
| "learning_rate": 3.2163093681377765e-07, | |
| "loss": 0.3551, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 2.693290734824281, | |
| "grad_norm": 0.5748802973182505, | |
| "learning_rate": 3.151037486242181e-07, | |
| "loss": 0.3248, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 2.696485623003195, | |
| "grad_norm": 0.5908314428457524, | |
| "learning_rate": 3.08641317298074e-07, | |
| "loss": 0.2885, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 2.6996805111821085, | |
| "grad_norm": 0.5528724107582127, | |
| "learning_rate": 3.022437321621452e-07, | |
| "loss": 0.352, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 2.702875399361022, | |
| "grad_norm": 0.5585598362246621, | |
| "learning_rate": 2.959110816468935e-07, | |
| "loss": 0.3332, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 2.7060702875399363, | |
| "grad_norm": 0.6050385908159153, | |
| "learning_rate": 2.896434532852277e-07, | |
| "loss": 0.3136, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 2.70926517571885, | |
| "grad_norm": 0.6064065560927091, | |
| "learning_rate": 2.834409337112842e-07, | |
| "loss": 0.3533, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 2.7124600638977636, | |
| "grad_norm": 0.5280435683159072, | |
| "learning_rate": 2.7730360865923954e-07, | |
| "loss": 0.3252, | |
| "step": 849 | |
| }, | |
| { | |
| "epoch": 2.7156549520766773, | |
| "grad_norm": 0.578523313304179, | |
| "learning_rate": 2.712315629621176e-07, | |
| "loss": 0.3023, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 2.718849840255591, | |
| "grad_norm": 0.6163076832373001, | |
| "learning_rate": 2.6522488055062076e-07, | |
| "loss": 0.3341, | |
| "step": 851 | |
| }, | |
| { | |
| "epoch": 2.722044728434505, | |
| "grad_norm": 0.6117384691824501, | |
| "learning_rate": 2.5928364445196975e-07, | |
| "loss": 0.3132, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 2.7252396166134183, | |
| "grad_norm": 0.5703662607136886, | |
| "learning_rate": 2.534079367887549e-07, | |
| "loss": 0.3251, | |
| "step": 853 | |
| }, | |
| { | |
| "epoch": 2.7284345047923324, | |
| "grad_norm": 0.6190014779819151, | |
| "learning_rate": 2.475978387778e-07, | |
| "loss": 0.3569, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 2.731629392971246, | |
| "grad_norm": 0.5604661408318365, | |
| "learning_rate": 2.4185343072904376e-07, | |
| "loss": 0.3395, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 2.7348242811501597, | |
| "grad_norm": 0.5489428613929013, | |
| "learning_rate": 2.3617479204442462e-07, | |
| "loss": 0.3313, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 2.7380191693290734, | |
| "grad_norm": 0.5534611059961758, | |
| "learning_rate": 2.305620012167853e-07, | |
| "loss": 0.3172, | |
| "step": 857 | |
| }, | |
| { | |
| "epoch": 2.741214057507987, | |
| "grad_norm": 0.5769562053475937, | |
| "learning_rate": 2.2501513582879108e-07, | |
| "loss": 0.3466, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 2.744408945686901, | |
| "grad_norm": 0.54615446896986, | |
| "learning_rate": 2.1953427255185122e-07, | |
| "loss": 0.3582, | |
| "step": 859 | |
| }, | |
| { | |
| "epoch": 2.747603833865815, | |
| "grad_norm": 0.5834092046516896, | |
| "learning_rate": 2.1411948714506414e-07, | |
| "loss": 0.3887, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 2.7507987220447285, | |
| "grad_norm": 0.5516422872250117, | |
| "learning_rate": 2.0877085445416889e-07, | |
| "loss": 0.3605, | |
| "step": 861 | |
| }, | |
| { | |
| "epoch": 2.753993610223642, | |
| "grad_norm": 0.6121681780880922, | |
| "learning_rate": 2.034884484105093e-07, | |
| "loss": 0.3498, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 2.757188498402556, | |
| "grad_norm": 0.5662039053316421, | |
| "learning_rate": 1.98272342030012e-07, | |
| "loss": 0.375, | |
| "step": 863 | |
| }, | |
| { | |
| "epoch": 2.7603833865814695, | |
| "grad_norm": 0.6474986303524936, | |
| "learning_rate": 1.9312260741218114e-07, | |
| "loss": 0.3808, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 2.763578274760383, | |
| "grad_norm": 0.6092793057060697, | |
| "learning_rate": 1.8803931573909584e-07, | |
| "loss": 0.2861, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 2.7667731629392973, | |
| "grad_norm": 0.5653652953607464, | |
| "learning_rate": 1.8302253727443041e-07, | |
| "loss": 0.3665, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 2.769968051118211, | |
| "grad_norm": 0.5262527951801934, | |
| "learning_rate": 1.7807234136248296e-07, | |
| "loss": 0.348, | |
| "step": 867 | |
| }, | |
| { | |
| "epoch": 2.7731629392971247, | |
| "grad_norm": 0.6010527895059987, | |
| "learning_rate": 1.731887964272144e-07, | |
| "loss": 0.3269, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 2.7763578274760383, | |
| "grad_norm": 0.5152094861239386, | |
| "learning_rate": 1.6837196997130434e-07, | |
| "loss": 0.3454, | |
| "step": 869 | |
| }, | |
| { | |
| "epoch": 2.779552715654952, | |
| "grad_norm": 0.5909709681738667, | |
| "learning_rate": 1.6362192857521942e-07, | |
| "loss": 0.344, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 2.7827476038338657, | |
| "grad_norm": 0.5029070496695086, | |
| "learning_rate": 1.5893873789628812e-07, | |
| "loss": 0.3648, | |
| "step": 871 | |
| }, | |
| { | |
| "epoch": 2.7859424920127793, | |
| "grad_norm": 0.5577303918259516, | |
| "learning_rate": 1.5432246266780083e-07, | |
| "loss": 0.3716, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 2.7891373801916934, | |
| "grad_norm": 0.5665556944271241, | |
| "learning_rate": 1.4977316669810782e-07, | |
| "loss": 0.3611, | |
| "step": 873 | |
| }, | |
| { | |
| "epoch": 2.792332268370607, | |
| "grad_norm": 0.543905991284816, | |
| "learning_rate": 1.4529091286973994e-07, | |
| "loss": 0.3035, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 2.7955271565495208, | |
| "grad_norm": 0.6350011040549947, | |
| "learning_rate": 1.4087576313854212e-07, | |
| "loss": 0.3211, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 2.7987220447284344, | |
| "grad_norm": 0.589866349573948, | |
| "learning_rate": 1.365277785328123e-07, | |
| "loss": 0.3134, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 2.801916932907348, | |
| "grad_norm": 0.5322922994710118, | |
| "learning_rate": 1.3224701915246053e-07, | |
| "loss": 0.3123, | |
| "step": 877 | |
| }, | |
| { | |
| "epoch": 2.8051118210862622, | |
| "grad_norm": 0.5583751783696411, | |
| "learning_rate": 1.280335441681796e-07, | |
| "loss": 0.3051, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 2.8083067092651754, | |
| "grad_norm": 0.5538379069248649, | |
| "learning_rate": 1.2388741182062348e-07, | |
| "loss": 0.3642, | |
| "step": 879 | |
| }, | |
| { | |
| "epoch": 2.8115015974440896, | |
| "grad_norm": 0.6159417305152532, | |
| "learning_rate": 1.198086794196035e-07, | |
| "loss": 0.3348, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 2.8146964856230032, | |
| "grad_norm": 0.5898313061788408, | |
| "learning_rate": 1.1579740334330014e-07, | |
| "loss": 0.3, | |
| "step": 881 | |
| }, | |
| { | |
| "epoch": 2.817891373801917, | |
| "grad_norm": 0.5018089700878364, | |
| "learning_rate": 1.1185363903747748e-07, | |
| "loss": 0.3716, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 2.8210862619808306, | |
| "grad_norm": 0.6168872718383936, | |
| "learning_rate": 1.0797744101472052e-07, | |
| "loss": 0.3369, | |
| "step": 883 | |
| }, | |
| { | |
| "epoch": 2.8242811501597442, | |
| "grad_norm": 0.5505660776029496, | |
| "learning_rate": 1.0416886285368188e-07, | |
| "loss": 0.3703, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 2.8274760383386583, | |
| "grad_norm": 0.5300963985701472, | |
| "learning_rate": 1.0042795719833964e-07, | |
| "loss": 0.4103, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 2.830670926517572, | |
| "grad_norm": 0.6561731290395931, | |
| "learning_rate": 9.675477575726954e-08, | |
| "loss": 0.332, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 2.8338658146964857, | |
| "grad_norm": 0.5544282515328289, | |
| "learning_rate": 9.314936930293283e-08, | |
| "loss": 0.3053, | |
| "step": 887 | |
| }, | |
| { | |
| "epoch": 2.8370607028753994, | |
| "grad_norm": 0.5235068301613184, | |
| "learning_rate": 8.961178767097178e-08, | |
| "loss": 0.3522, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 2.840255591054313, | |
| "grad_norm": 0.6769353156025437, | |
| "learning_rate": 8.614207975952083e-08, | |
| "loss": 0.3801, | |
| "step": 889 | |
| }, | |
| { | |
| "epoch": 2.8434504792332267, | |
| "grad_norm": 0.5446946980464149, | |
| "learning_rate": 8.274029352853264e-08, | |
| "loss": 0.332, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 2.8466453674121404, | |
| "grad_norm": 0.49142751252317257, | |
| "learning_rate": 7.940647599911477e-08, | |
| "loss": 0.3462, | |
| "step": 891 | |
| }, | |
| { | |
| "epoch": 2.8498402555910545, | |
| "grad_norm": 0.5256070158129452, | |
| "learning_rate": 7.614067325287632e-08, | |
| "loss": 0.3708, | |
| "step": 892 | |
| }, | |
| { | |
| "epoch": 2.853035143769968, | |
| "grad_norm": 0.6187672006800049, | |
| "learning_rate": 7.294293043129785e-08, | |
| "loss": 0.3342, | |
| "step": 893 | |
| }, | |
| { | |
| "epoch": 2.856230031948882, | |
| "grad_norm": 0.5653221557610779, | |
| "learning_rate": 6.981329173509909e-08, | |
| "loss": 0.3443, | |
| "step": 894 | |
| }, | |
| { | |
| "epoch": 2.8594249201277955, | |
| "grad_norm": 1.1954807605844084, | |
| "learning_rate": 6.675180042363505e-08, | |
| "loss": 0.3882, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 2.862619808306709, | |
| "grad_norm": 0.5765003588637598, | |
| "learning_rate": 6.375849881429418e-08, | |
| "loss": 0.3061, | |
| "step": 896 | |
| }, | |
| { | |
| "epoch": 2.8658146964856233, | |
| "grad_norm": 0.5165502215161434, | |
| "learning_rate": 6.083342828191453e-08, | |
| "loss": 0.3934, | |
| "step": 897 | |
| }, | |
| { | |
| "epoch": 2.8690095846645365, | |
| "grad_norm": 0.5579060004933497, | |
| "learning_rate": 5.797662925821068e-08, | |
| "loss": 0.364, | |
| "step": 898 | |
| }, | |
| { | |
| "epoch": 2.8722044728434506, | |
| "grad_norm": 0.5357705818949167, | |
| "learning_rate": 5.518814123121885e-08, | |
| "loss": 0.333, | |
| "step": 899 | |
| }, | |
| { | |
| "epoch": 2.8753993610223643, | |
| "grad_norm": 0.6215205130389762, | |
| "learning_rate": 5.246800274474439e-08, | |
| "loss": 0.3215, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 2.878594249201278, | |
| "grad_norm": 0.5362725801320283, | |
| "learning_rate": 4.981625139783619e-08, | |
| "loss": 0.3625, | |
| "step": 901 | |
| }, | |
| { | |
| "epoch": 2.8817891373801916, | |
| "grad_norm": 0.5832017290942086, | |
| "learning_rate": 4.723292384426203e-08, | |
| "loss": 0.4049, | |
| "step": 902 | |
| }, | |
| { | |
| "epoch": 2.8849840255591053, | |
| "grad_norm": 0.6011616287171958, | |
| "learning_rate": 4.471805579200239e-08, | |
| "loss": 0.3256, | |
| "step": 903 | |
| }, | |
| { | |
| "epoch": 2.8881789137380194, | |
| "grad_norm": 0.5601944631354882, | |
| "learning_rate": 4.227168200276077e-08, | |
| "loss": 0.3686, | |
| "step": 904 | |
| }, | |
| { | |
| "epoch": 2.891373801916933, | |
| "grad_norm": 0.5472469736396958, | |
| "learning_rate": 3.989383629147747e-08, | |
| "loss": 0.3418, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 2.8945686900958467, | |
| "grad_norm": 0.5338171927335532, | |
| "learning_rate": 3.758455152586715e-08, | |
| "loss": 0.3715, | |
| "step": 906 | |
| }, | |
| { | |
| "epoch": 2.8977635782747604, | |
| "grad_norm": 0.6101596416839852, | |
| "learning_rate": 3.534385962596143e-08, | |
| "loss": 0.3564, | |
| "step": 907 | |
| }, | |
| { | |
| "epoch": 2.900958466453674, | |
| "grad_norm": 0.5311824758840752, | |
| "learning_rate": 3.3171791563669785e-08, | |
| "loss": 0.3974, | |
| "step": 908 | |
| }, | |
| { | |
| "epoch": 2.9041533546325877, | |
| "grad_norm": 0.5897397502333577, | |
| "learning_rate": 3.10683773623488e-08, | |
| "loss": 0.3643, | |
| "step": 909 | |
| }, | |
| { | |
| "epoch": 2.9073482428115014, | |
| "grad_norm": 0.5451862506343562, | |
| "learning_rate": 2.9033646096390255e-08, | |
| "loss": 0.4077, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 2.9105431309904155, | |
| "grad_norm": 0.537285429472973, | |
| "learning_rate": 2.706762589081646e-08, | |
| "loss": 0.3777, | |
| "step": 911 | |
| }, | |
| { | |
| "epoch": 2.913738019169329, | |
| "grad_norm": 0.553809502847126, | |
| "learning_rate": 2.517034392089446e-08, | |
| "loss": 0.3414, | |
| "step": 912 | |
| }, | |
| { | |
| "epoch": 2.916932907348243, | |
| "grad_norm": 0.5872150553419763, | |
| "learning_rate": 2.3341826411756863e-08, | |
| "loss": 0.3312, | |
| "step": 913 | |
| }, | |
| { | |
| "epoch": 2.9201277955271565, | |
| "grad_norm": 0.5982165518925496, | |
| "learning_rate": 2.158209863804217e-08, | |
| "loss": 0.3105, | |
| "step": 914 | |
| }, | |
| { | |
| "epoch": 2.92332268370607, | |
| "grad_norm": 0.5953312685899663, | |
| "learning_rate": 1.9891184923544472e-08, | |
| "loss": 0.2881, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 2.9265175718849843, | |
| "grad_norm": 0.5589989469340159, | |
| "learning_rate": 1.826910864087761e-08, | |
| "loss": 0.3441, | |
| "step": 916 | |
| }, | |
| { | |
| "epoch": 2.9297124600638975, | |
| "grad_norm": 0.5352148216822907, | |
| "learning_rate": 1.6715892211150442e-08, | |
| "loss": 0.3537, | |
| "step": 917 | |
| }, | |
| { | |
| "epoch": 2.9329073482428116, | |
| "grad_norm": 0.647867530843516, | |
| "learning_rate": 1.5231557103658755e-08, | |
| "loss": 0.2741, | |
| "step": 918 | |
| }, | |
| { | |
| "epoch": 2.9361022364217253, | |
| "grad_norm": 0.5517267321893599, | |
| "learning_rate": 1.3816123835588835e-08, | |
| "loss": 0.3679, | |
| "step": 919 | |
| }, | |
| { | |
| "epoch": 2.939297124600639, | |
| "grad_norm": 0.5804393153053172, | |
| "learning_rate": 1.2469611971731576e-08, | |
| "loss": 0.3643, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 2.9424920127795526, | |
| "grad_norm": 0.5637831632290987, | |
| "learning_rate": 1.1192040124214931e-08, | |
| "loss": 0.38, | |
| "step": 921 | |
| }, | |
| { | |
| "epoch": 2.9456869009584663, | |
| "grad_norm": 0.5791231995251553, | |
| "learning_rate": 9.983425952243552e-09, | |
| "loss": 0.362, | |
| "step": 922 | |
| }, | |
| { | |
| "epoch": 2.9488817891373804, | |
| "grad_norm": 0.613327768846829, | |
| "learning_rate": 8.84378616185788e-09, | |
| "loss": 0.3305, | |
| "step": 923 | |
| }, | |
| { | |
| "epoch": 2.952076677316294, | |
| "grad_norm": 0.523776663808406, | |
| "learning_rate": 7.773136505700995e-09, | |
| "loss": 0.3285, | |
| "step": 924 | |
| }, | |
| { | |
| "epoch": 2.9552715654952078, | |
| "grad_norm": 0.5372633451404858, | |
| "learning_rate": 6.7714917828004545e-09, | |
| "loss": 0.3553, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 2.9584664536741214, | |
| "grad_norm": 0.5997913801512937, | |
| "learning_rate": 5.838865838366792e-09, | |
| "loss": 0.329, | |
| "step": 926 | |
| }, | |
| { | |
| "epoch": 2.961661341853035, | |
| "grad_norm": 0.5572950173147985, | |
| "learning_rate": 4.975271563599227e-09, | |
| "loss": 0.3111, | |
| "step": 927 | |
| }, | |
| { | |
| "epoch": 2.9648562300319488, | |
| "grad_norm": 0.5826358090737692, | |
| "learning_rate": 4.180720895508028e-09, | |
| "loss": 0.3306, | |
| "step": 928 | |
| }, | |
| { | |
| "epoch": 2.9680511182108624, | |
| "grad_norm": 0.5738688144268712, | |
| "learning_rate": 3.4552248167507576e-09, | |
| "loss": 0.3519, | |
| "step": 929 | |
| }, | |
| { | |
| "epoch": 2.9712460063897765, | |
| "grad_norm": 0.5323772177269126, | |
| "learning_rate": 2.798793355478502e-09, | |
| "loss": 0.3854, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 2.97444089456869, | |
| "grad_norm": 0.5154651602015146, | |
| "learning_rate": 2.2114355851993175e-09, | |
| "loss": 0.3371, | |
| "step": 931 | |
| }, | |
| { | |
| "epoch": 2.977635782747604, | |
| "grad_norm": 0.5355099037607711, | |
| "learning_rate": 1.6931596246516636e-09, | |
| "loss": 0.3501, | |
| "step": 932 | |
| }, | |
| { | |
| "epoch": 2.9808306709265175, | |
| "grad_norm": 0.5195387643181241, | |
| "learning_rate": 1.24397263769227e-09, | |
| "loss": 0.3754, | |
| "step": 933 | |
| }, | |
| { | |
| "epoch": 2.984025559105431, | |
| "grad_norm": 0.5496579222846415, | |
| "learning_rate": 8.638808331973281e-10, | |
| "loss": 0.3486, | |
| "step": 934 | |
| }, | |
| { | |
| "epoch": 2.987220447284345, | |
| "grad_norm": 0.580032230336796, | |
| "learning_rate": 5.528894649758921e-10, | |
| "loss": 0.3533, | |
| "step": 935 | |
| }, | |
| { | |
| "epoch": 2.9904153354632586, | |
| "grad_norm": 0.6028588307685283, | |
| "learning_rate": 3.1100283169938074e-10, | |
| "loss": 0.35, | |
| "step": 936 | |
| }, | |
| { | |
| "epoch": 2.9936102236421727, | |
| "grad_norm": 0.5597644612132677, | |
| "learning_rate": 1.3822427683884975e-10, | |
| "loss": 0.3851, | |
| "step": 937 | |
| }, | |
| { | |
| "epoch": 2.9968051118210863, | |
| "grad_norm": 0.5543081962122827, | |
| "learning_rate": 3.4556188622802964e-11, | |
| "loss": 0.3398, | |
| "step": 938 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "grad_norm": 0.5681028010288746, | |
| "learning_rate": 0.0, | |
| "loss": 0.3356, | |
| "step": 939 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "step": 939, | |
| "total_flos": 406391461183488.0, | |
| "train_loss": 0.43733476487973244, | |
| "train_runtime": 10132.2972, | |
| "train_samples_per_second": 2.96, | |
| "train_steps_per_second": 0.093 | |
| } | |
| ], | |
| "logging_steps": 1.0, | |
| "max_steps": 939, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 406391461183488.0, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |